diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index 1a1e46d55f7a4..aff0add62a2b6 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -7,19 +7,14 @@ steps: matrix: setup: image: - - debian-11 - debian-12 - opensuse-leap-15 - - oraclelinux-7 - oraclelinux-8 - - sles-12 - sles-15 - - ubuntu-1804 - ubuntu-2004 - ubuntu-2204 - rocky-8 - rocky-9 - - rhel-7 - rhel-8 - rhel-9 - almalinux-8 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index c1b10a46c62a7..9bcd61ac1273c 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -8,19 +8,14 @@ steps: matrix: setup: image: - - debian-11 - debian-12 - opensuse-leap-15 - - oraclelinux-7 - oraclelinux-8 - - sles-12 - sles-15 - - ubuntu-1804 - ubuntu-2004 - ubuntu-2204 - rocky-8 - rocky-9 - - rhel-7 - rhel-8 - rhel-9 - almalinux-8 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 79e5a2e8dcdbb..8bee3a78f8316 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -7,19 +7,14 @@ steps: matrix: setup: image: - - debian-11 - debian-12 - opensuse-leap-15 - - oraclelinux-7 - oraclelinux-8 - - sles-12 - sles-15 - - ubuntu-1804 - ubuntu-2004 - ubuntu-2204 - rocky-8 - rocky-9 - - rhel-7 - rhel-8 - rhel-9 - almalinux-8 @@ -90,7 +85,6 @@ steps: setup: image: - amazonlinux-2023 - - amazonlinux-2 agents: provider: aws imagePrefix: elasticsearch-{{matrix.image}} diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index ffc1350aceab3..6c7dadfd454ed 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -10,19 +10,14 @@ steps: matrix: setup: image: - - debian-11 - debian-12 - opensuse-leap-15 - - oraclelinux-7 - oraclelinux-8 - - sles-12 - sles-15 - - ubuntu-1804 - ubuntu-2004 - ubuntu-2204 - rocky-8 - rocky-9 - - rhel-7 - rhel-8 - rhel-9 - almalinux-8 diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 0fb75b59b6096..0d7bcea168df8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -13,8 +13,8 @@ * This class models the different Docker base images that are used to build Docker distributions of Elasticsearch. 
*/ public enum DockerBase { - // "latest" here is intentional, since the image name specifies "8" - DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"), + // "latest" here is intentional, since the image name specifies "9" + DEFAULT("docker.elastic.co/ubi9/ubi-minimal:latest", "", "microdnf"), // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"), diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 4446952fec2bb..720d6a7c2efb6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -20,6 +20,7 @@ import org.elasticsearch.gradle.test.GradleTestPolicySetupPlugin; import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider; import org.gradle.api.Action; +import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; @@ -112,7 +113,6 @@ public void execute(Task t) { test.jvmArgs( "-Xmx" + System.getProperty("tests.heap.size", "512m"), "-Xms" + System.getProperty("tests.heap.size", "512m"), - "-Djava.security.manager=allow", "-Dtests.testfeatures.enabled=true", "--add-opens=java.base/java.util=ALL-UNNAMED", // TODO: only open these for mockito when it is modularized @@ -127,6 +127,13 @@ public void execute(Task t) { ); test.getJvmArgumentProviders().add(new SimpleCommandLineArgumentProvider("-XX:HeapDumpPath=" + heapdumpDir)); + test.getJvmArgumentProviders().add(() -> { + if (test.getJavaVersion().compareTo(JavaVersion.VERSION_23) <= 0) { + return List.of("-Djava.security.manager=allow"); + } else { + return List.of(); + } + }); String argline = System.getProperty("tests.jvm.argline"); if (argline != null) { diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index aaf654a37dd22..ede1b392b8a41 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -35,7 +35,7 @@ commonscodec = 1.15 protobuf = 3.25.5 # test dependencies -randomizedrunner = 2.8.0 +randomizedrunner = 2.8.2 junit = 4.13.2 junit5 = 5.7.1 hamcrest = 2.1 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/GradleTestPolicySetupPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/GradleTestPolicySetupPlugin.java index 2068ee4447971..2107156902487 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/test/GradleTestPolicySetupPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/GradleTestPolicySetupPlugin.java @@ -9,11 +9,14 @@ package org.elasticsearch.gradle.test; +import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.invocation.Gradle; import org.gradle.api.tasks.testing.Test; +import java.util.List; + public class GradleTestPolicySetupPlugin implements Plugin { @Override @@ -23,8 +26,13 @@ public void apply(Project project) { test.systemProperty("tests.gradle", true); test.systemProperty("tests.task", test.getPath()); - // Flag is required for later Java versions since our tests use a custom security manager - test.jvmArgs("-Djava.security.manager=allow"); + 
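The replacement that continues in this hunk makes the flag conditional rather than dropping it outright: `-Djava.security.manager=allow` is only a recognized option on runtimes that still ship the Security Manager, and JDK 24 (JEP 486) permanently disabled it, so passing the flag there fails JVM startup. A minimal standalone sketch of the same gate, independent of Gradle (the class and method names are illustrative, not part of this change):

[source,java]
----
import java.util.List;

// Illustrative sketch: compute the flag lazily so the decision tracks the JVM
// that will actually run the tests, not the JVM running the build.
public final class SecurityManagerFlag {

    private SecurityManagerFlag() {}

    /** The Security Manager was permanently disabled in JDK 24 (JEP 486). */
    static boolean isSecurityManagerAvailable(Runtime.Version version) {
        return version.feature() < 24;
    }

    /** Extra JVM args for a test JVM of the given version. */
    static List<String> securityManagerArgs(Runtime.Version version) {
        return isSecurityManagerAvailable(version)
            ? List.of("-Djava.security.manager=allow")
            : List.of();
    }

    public static void main(String[] args) {
        // Prints [-Djava.security.manager=allow] on JDK 23 and earlier, [] on JDK 24+.
        System.out.println(securityManagerArgs(Runtime.version()));
    }
}
----

The lambda added below applies the same predicate through a JVM argument provider, checking the test task's `JavaVersion` rather than the build JVM's: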
test.getJvmArgumentProviders().add(() -> { + if (test.getJavaVersion().compareTo(JavaVersion.VERSION_23) <= 0) { + return List.of("-Djava.security.manager=allow"); + } else { + return List.of(); + } + }); SystemPropertyCommandLineArgumentProvider nonInputProperties = new SystemPropertyCommandLineArgumentProvider(); // don't track these as inputs since they contain absolute paths and break cache relocatability diff --git a/dev-tools/publish_zstd_binaries.sh b/dev-tools/publish_zstd_binaries.sh index d5be5c4aaec60..25d4aed6255be 100755 --- a/dev-tools/publish_zstd_binaries.sh +++ b/dev-tools/publish_zstd_binaries.sh @@ -79,8 +79,8 @@ build_linux_jar() { } echo 'Building Linux jars...' -LINUX_ARM_JAR=$(build_linux_jar "linux/amd64" "x86-64") -LINUX_X86_JAR=$(build_linux_jar "linux/arm64" "aarch64") +LINUX_ARM_JAR=$(build_linux_jar "linux/arm64" "aarch64") +LINUX_X86_JAR=$(build_linux_jar "linux/amd64" "x86-64") build_windows_jar() { ARTIFACT="$TEMP/zstd-$VERSION-windows-x86-64.jar" diff --git a/distribution/build.gradle b/distribution/build.gradle index bfbf10ac85e2f..e65d07dcfc2b4 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -70,10 +70,10 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) { // Explicitly add the dependency on the RHEL UBI Docker base image String[] rhelUbiFields = [ 'Red Hat Universal Base Image minimal', - '8', - 'https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8', + '9', + 'https://catalog.redhat.com/software/containers/ubi9-minimal/61832888c0d15aff4912fe0d', 'Custom;https://www.redhat.com/licenses/EULA_Red_Hat_Universal_Base_Image_English_20190422.pdf', - 'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/8/ubi-minimal-8-source.tar.gz' + 'https://oss-dependencies.elastic.co/red-hat-universal-base-image-minimal/9/ubi-minimal-9-source.tar.gz' ] additionalLines << rhelUbiFields.join(',') } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index b17ad7c87e3ff..fe0f82560894c 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -11,6 +11,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.jdk.RuntimeVersionFeature; import java.io.IOException; import java.nio.file.Files; @@ -137,9 +139,13 @@ private static Stream maybeWorkaroundG1Bug() { return Stream.of(); } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) private static Stream maybeAllowSecurityManager() { - // Will become conditional on useEntitlements once entitlements can run without SM - return Stream.of("-Djava.security.manager=allow"); + if (RuntimeVersionFeature.isSecurityManagerAvailable()) { + // Will become conditional on useEntitlements once entitlements can run without SM + return Stream.of("-Djava.security.manager=allow"); + } + return Stream.of(); } private static Stream maybeAttachEntitlementAgent(boolean useEntitlements) { diff --git a/docs/changelog/118025.yaml b/docs/changelog/118025.yaml new file mode 100644 index 0000000000000..9b615f4d5e621 --- /dev/null +++ b/docs/changelog/118025.yaml @@ -0,0 +1,5 @@ +pr: 118025 +summary: Update sparse text embeddings API 
route for Inference Service
+area: Inference
+type: enhancement
+issues: []
diff --git a/docs/changelog/118104.yaml b/docs/changelog/118104.yaml
new file mode 100644
index 0000000000000..eb8ac661e9f93
--- /dev/null
+++ b/docs/changelog/118104.yaml
@@ -0,0 +1,12 @@
+pr: 118104
+summary: Remove old `_knn_search` tech preview API in v9
+area: Vector Search
+type: breaking
+issues: []
+breaking:
+  title: Remove old `_knn_search` tech preview API in v9
+  area: REST API
+  details: The original, tech-preview API for vector search, `_knn_search`, has been removed in v9. For all vector search
+    operations, you should use the `_search` endpoint.
+  impact: The `_knn_search` API is now inaccessible without providing a compatible-with flag for v8.
+  notable: false
diff --git a/docs/changelog/118114.yaml b/docs/changelog/118114.yaml
new file mode 100644
index 0000000000000..1b7532d5df981
--- /dev/null
+++ b/docs/changelog/118114.yaml
@@ -0,0 +1,5 @@
+pr: 118114
+summary: Enable physical plan verification
+area: ES|QL
+type: enhancement
+issues: []
diff --git a/docs/changelog/118177.yaml b/docs/changelog/118177.yaml
new file mode 100644
index 0000000000000..5201fec3db306
--- /dev/null
+++ b/docs/changelog/118177.yaml
@@ -0,0 +1,6 @@
+pr: 118177
+summary: Fix Bedrock event executor terminated cache issue
+area: Machine Learning
+type: bug
+issues:
+  - 117916
diff --git a/docs/changelog/118267.yaml b/docs/changelog/118267.yaml
new file mode 100644
index 0000000000000..3e3920caeb0f9
--- /dev/null
+++ b/docs/changelog/118267.yaml
@@ -0,0 +1,5 @@
+pr: 118267
+summary: Add get migration reindex status
+area: Data streams
+type: enhancement
+issues: []
diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc
index 21643133553e1..9211e2d08e88b 100644
--- a/docs/reference/inference/service-openai.asciidoc
+++ b/docs/reference/inference/service-openai.asciidoc
@@ -76,6 +76,12 @@ https://platform.openai.com/api-keys[API keys section].
 include::inference-shared.asciidoc[tag=api-key-admonition]
 --

+`dimensions`:::
+(Optional, integer)
+The number of dimensions the resulting output embeddings should have.
+Only supported in `text-embedding-3` and later models.
+If not set, the OpenAI-defined default for the model is used.
+
 `model_id`:::
 (Required, string)
 The name of the model to use for the {infer} task.
@@ -134,8 +140,8 @@ Specifies the user issuing the request, which can be used for abuse detection.

 [[inference-example-openai]]
 ==== OpenAI service example

-The following example shows how to create an {infer} endpoint called
-`openai-embeddings` to perform a `text_embedding` task type.
+The following example shows how to create an {infer} endpoint called `openai-embeddings` to perform a `text_embedding` task type.
+The embeddings created by requests to this endpoint will have 128 dimensions.

 [source,console]
 ------------------------------------------------------------
 PUT _inference/text_embedding/openai-embeddings
 {
     "service": "openai",
     "service_settings": {
         "api_key": "",
-        "model_id": "text-embedding-ada-002"
+        "model_id": "text-embedding-3-small",
+        "dimensions": 128
     }
 }
 ------------------------------------------------------------
 // TEST[skip:TBD]

-The next example shows how to create an {infer} endpoint called
-`openai-completion` to perform a `completion` task type.
+The next example shows how to create an {infer} endpoint called `openai-completion` to perform a `completion` task type.
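Both this and the preceding embeddings endpoint can equally be created from Java. A sketch for the embeddings variant using the low-level REST client; it assumes a local unsecured cluster and the `elasticsearch-rest-client` dependency on the classpath, and `<api_key>` is a placeholder:

[source,java]
----
import org.apache.http.HttpHost;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class CreateOpenAiEmbeddingEndpoint {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/_inference/text_embedding/openai-embeddings");
            // Same body as the console example above; dimensions is only
            // accepted by text-embedding-3 and later models.
            request.setJsonEntity("""
                {
                  "service": "openai",
                  "service_settings": {
                    "api_key": "<api_key>",
                    "model_id": "text-embedding-3-small",
                    "dimensions": 128
                  }
                }""");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
----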
[source,console] ------------------------------------------------------------ diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index e6e11d6dd539f..199a59a5b143c 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -232,8 +232,8 @@ it will be set to the length of the first vector added to the field. `index`:: (Optional, Boolean) -If `true`, you can search this field using the <>. Defaults to `true`. +If `true`, you can search this field using the <> +or <> . Defaults to `true`. [[dense-vector-similarity]] `similarity`:: diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 5048220966bba..8f0b16e31b56e 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -244,6 +244,25 @@ The deprecated highlighting `force_source` parameter is no longer supported. Users should remove usages of the `force_source` parameter from their search requests. ==== +[discrete] +[[breaking_90_transforms_changes]] +==== {transforms-cap} changes + +[[updating_deprecated_transform_roles]] +.Updating deprecated {transform} roles (`data_frame_transforms_admin` and `data_frame_transforms_user`) +[%collapsible] +==== +*Details* + +The `data_frame_transforms_admin` and `data_frame_transforms_user` {transform} roles have been deprecated. + +*Impact* + +Users must update any existing {transforms} that use deprecated {transform} roles (`data_frame_transforms_admin` or `data_frame_transforms_user`) to use the new equivalent {transform} roles (`transform_admin` or `transform_user`). +To update the {transform} roles: + +1. Switch to a user with the `transform_admin` role (to replace `data_frame_transforms_admin`) or the `transform_user` role (to replace `data_frame_transforms_user`). +2. Call the <> with that user. +==== + [discrete] [[deprecated-9.0]] diff --git a/docs/reference/migration/migrate_9_0/transforms-migration-guide.asciidoc b/docs/reference/migration/migrate_9_0/transforms-migration-guide.asciidoc deleted file mode 100644 index d41c524d68d5c..0000000000000 --- a/docs/reference/migration/migrate_9_0/transforms-migration-guide.asciidoc +++ /dev/null @@ -1,9 +0,0 @@ -[[transforms-migration-guide]] -== {transforms-cap} migration guide -This migration guide helps you upgrade your {transforms} to work with the 9.0 release. Each section outlines a breaking change and any manual steps needed to upgrade your {transforms} to be compatible with 9.0. - - -=== Updating deprecated {transform} roles (`data_frame_transforms_admin` and `data_frame_transforms_user`) -If you have existing {transforms} that use deprecated {transform} roles (`data_frame_transforms_admin` or `data_frame_transforms_user`) you must update them to use the new equivalent {transform} roles (`transform_admin` or `transform_user`). To update your {transform} roles: -1. Switch to a user with the `transform_admin` role (to replace `data_frame_transforms_admin`) or the `transform_user` role (to replace `data_frame_transforms_user`). -2. Call the <> with that user. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 506dff7891ad2..c3bf84fa600d2 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -1942,3 +1942,8 @@ Refer to <>. === Delete geoip database configuration API Refer to <>. 
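The two-step {transform} role migration described above amounts to a single update call issued by the re-authorized user. A sketch with the low-level Java REST client; the transform name and credentials are placeholders, and the empty update body (relying on the update call re-recording the caller's security context without any config changes) is an assumption of this sketch, not spelled out in the source:

[source,java]
----
import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.apache.http.HttpHost;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;

public class ReauthorizeTransform {
    public static void main(String[] args) throws Exception {
        // Placeholder credentials for a user that already holds transform_admin
        // (replacing data_frame_transforms_admin) or transform_user.
        String credentials = Base64.getEncoder()
            .encodeToString("transform-admin-user:changeme".getBytes(StandardCharsets.UTF_8));

        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("POST", "/_transform/my-transform/_update");
            request.setJsonEntity("{}"); // no config changes; the call simply runs as the new user
            request.setOptions(RequestOptions.DEFAULT.toBuilder()
                .addHeader("Authorization", "Basic " + credentials));
            System.out.println(client.performRequest(request).getStatusLine());
        }
    }
}
----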
+ +[role="exclude",id="knn-search-api"] +=== Delete _knn_search API + +Refer to <>. diff --git a/docs/reference/search.asciidoc b/docs/reference/search.asciidoc index b39afff876eed..70ffe02e44d95 100644 --- a/docs/reference/search.asciidoc +++ b/docs/reference/search.asciidoc @@ -50,8 +50,6 @@ include::search/async-search.asciidoc[] include::search/point-in-time-api.asciidoc[] -include::search/knn-search.asciidoc[] - include::search/retriever.asciidoc[] include::search/rrf.asciidoc[] diff --git a/docs/reference/search/knn-search.asciidoc b/docs/reference/search/knn-search.asciidoc deleted file mode 100644 index 78e3e13b09fee..0000000000000 --- a/docs/reference/search/knn-search.asciidoc +++ /dev/null @@ -1,146 +0,0 @@ -[[knn-search-api]] -=== kNN search API -++++ -kNN search -++++ - -deprecated::[8.4.0,"The kNN search API has been replaced by the <> in the search API."] - -Performs a k-nearest neighbor (kNN) search and returns the matching documents. - -//// -[source,console] ----- -PUT my-index -{ - "mappings": { - "properties": { - "image_vector": { - "type": "dense_vector", - "dims": 3, - "index": true, - "similarity": "l2_norm" - } - } - } -} - -PUT my-index/_doc/1?refresh -{ - "image_vector" : [0.5, 10, 6] -} ----- -//// - -[source,console] ----- -GET my-index/_knn_search -{ - "knn": { - "field": "image_vector", - "query_vector": [0.3, 0.1, 1.2], - "k": 10, - "num_candidates": 100 - }, - "_source": ["name", "file_type"] -} ----- -// TEST[continued] -// TEST[warning:The kNN search API has been replaced by the `knn` option in the search API.] - -[[knn-search-api-request]] -==== {api-request-title} - -`GET /_knn_search` - -`POST /_knn_search` - -[[knn-search-api-prereqs]] -==== {api-prereq-title} - -* If the {es} {security-features} are enabled, you must have the `read` -<> for the target data stream, index, -or alias. - -[[knn-search-api-desc]] -==== {api-description-title} - -The kNN search API performs a k-nearest neighbor (kNN) search on a -<> field. Given a query vector, it finds the _k_ -closest vectors and returns those documents as search hits. - -//tag::hnsw-algorithm[] -{es} uses the https://arxiv.org/abs/1603.09320[HNSW algorithm] to support -efficient kNN search. Like most kNN algorithms, HNSW is an approximate method -that sacrifices result accuracy for improved search speed. This means the -results returned are not always the true _k_ closest neighbors. -//end::hnsw-algorithm[] - -The kNN search API supports restricting the search using a filter. The search -will return the top `k` documents that also match the filter query. - -[[knn-search-api-path-params]] -==== {api-path-parms-title} - -``:: -(Optional, string) Comma-separated list of data streams, indices, and aliases -to search. Supports wildcards (`*`). To search all data streams and indices, -use `*` or `_all`. 
- -[role="child_attributes"] -[[knn-search-api-query-params]] -==== {api-query-parms-title} - -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] - -[role="child_attributes"] -[[knn-search-api-request-body]] -==== {api-request-body-title} - -`filter`:: -(Optional, <>) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] - -`knn`:: -(Required, object) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn] -+ -.Properties of `knn` object -[%collapsible%open] -==== -`field`:: -(Required, string) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-field] - -`k`:: -(Optional, integer) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-k] - -`num_candidates`:: -(Optional, integer) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-num-candidates] - -`query_vector`:: -(Required, array of floats or string) -include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector] -==== - -include::{es-ref-dir}/search/search.asciidoc[tag=docvalue-fields-def] -include::{es-ref-dir}/search/search.asciidoc[tag=fields-param-def] -include::{es-ref-dir}/search/search.asciidoc[tag=source-filtering-def] -include::{es-ref-dir}/search/search.asciidoc[tag=stored-fields-def] - -[role="child_attributes"] -[[knn-search-api-response-body]] -==== {api-response-body-title} - -A kNN search response has the exact same structure as a -<>. However, certain sections -have a meaning specific to kNN search: - -* The <> is determined by -the similarity between the query and document vector. See -<>. -* The `hits.total` object contains the total number of nearest neighbor -candidates considered, which is `num_candidates * num_shards`. The -`hits.total.relation` will always be `eq`, indicating an exact value. diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 6fb7f1747051f..59a903b95e4f8 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -1058,8 +1058,10 @@ PUT image-index * When using kNN search in <>, the <> option is not supported. -* {blank} -include::{es-ref-dir}/search/knn-search.asciidoc[tag=hnsw-algorithm] +* {es} uses the https://arxiv.org/abs/1603.09320[HNSW algorithm] to support +efficient kNN search. Like most kNN algorithms, HNSW is an approximate method +that sacrifices result accuracy for improved search speed. This means the +results returned are not always the true _k_ closest neighbors. NOTE: Approximate kNN search always uses the <> search type in order to gather diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 58feb55f32e2f..8694d7f5b46c6 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -39,7 +39,7 @@ adjust memory usage in Docker Desktop by going to **Settings > Resources**. ---- docker network create elastic ---- - +// REVIEWED[DEC.10.24] . Pull the {es} Docker image. + -- @@ -52,10 +52,11 @@ endif::[] ---- docker pull {docker-image} ---- +// REVIEWED[DEC.10.24] -- . Optional: Install -https://docs.sigstore.dev/system_config/installation/[Cosign] for your +https://docs.sigstore.dev/cosign/system_config/installation/[Cosign] for your environment. Then use Cosign to verify the {es} image's signature. + [[docker-verify-signature]] @@ -64,6 +65,7 @@ environment. Then use Cosign to verify the {es} image's signature. 
wget https://artifacts.elastic.co/cosign.pub cosign verify --key cosign.pub {docker-image} ---- +// REVIEWED[DEC.10.24] + The `cosign` command prints the check results and the signature payload in JSON format: + @@ -75,6 +77,7 @@ The following checks were performed on each of these signatures: - Existence of the claims in the transparency log was verified offline - The signatures were verified against the specified public key ---- +// REVIEWED[DEC.10.24] . Start an {es} container. + @@ -82,6 +85,7 @@ The following checks were performed on each of these signatures: ---- docker run --name es01 --net elastic -p 9200:9200 -it -m 1GB {docker-image} ---- +// REVIEWED[DEC.10.24] + TIP: Use the `-m` flag to set a memory limit for the container. This removes the need to <>. @@ -95,6 +99,7 @@ If you intend to use the {ml} capabilities, then start the container with this c ---- docker run --name es01 --net elastic -p 9200:9200 -it -m 6GB -e "xpack.ml.use_auto_machine_memory_percent=true" {docker-image} ---- +// REVIEWED[DEC.10.24] The command prints the `elastic` user password and an enrollment token for {kib}. . Copy the generated `elastic` password and enrollment token. These credentials @@ -106,6 +111,7 @@ credentials using the following commands. docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana ---- +// REVIEWED[DEC.10.24] + We recommend storing the `elastic` password as an environment variable in your shell. Example: + @@ -113,6 +119,7 @@ We recommend storing the `elastic` password as an environment variable in your s ---- export ELASTIC_PASSWORD="your_password" ---- +// REVIEWED[DEC.10.24] . Copy the `http_ca.crt` SSL certificate from the container to your local machine. + @@ -120,6 +127,7 @@ export ELASTIC_PASSWORD="your_password" ---- docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ---- +// REVIEWED[DEC.10.24] . Make a REST API call to {es} to ensure the {es} container is running. + @@ -128,6 +136,7 @@ docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ---- // NOTCONSOLE +// REVIEWED[DEC.10.24] ===== Add more nodes @@ -137,6 +146,7 @@ curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ---- docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s node ---- +// REVIEWED[DEC.10.24] + The enrollment token is valid for 30 minutes. @@ -146,6 +156,7 @@ The enrollment token is valid for 30 minutes. ---- docker run -e ENROLLMENT_TOKEN="" --name es02 --net elastic -it -m 1GB {docker-image} ---- +// REVIEWED[DEC.10.24] . Call the <> to verify the node was added to the cluster. + @@ -154,6 +165,7 @@ docker run -e ENROLLMENT_TOKEN="" --name es02 --net elastic -it -m 1GB {d curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200/_cat/nodes ---- // NOTCONSOLE +// REVIEWED[DEC.10.24] [[run-kibana-docker]] ===== Run {kib} @@ -170,6 +182,7 @@ endif::[] ---- docker pull {kib-docker-image} ---- +// REVIEWED[DEC.10.24] -- . Optional: Verify the {kib} image's signature. @@ -179,6 +192,7 @@ docker pull {kib-docker-image} wget https://artifacts.elastic.co/cosign.pub cosign verify --key cosign.pub {kib-docker-image} ---- +// REVIEWED[DEC.10.24] . Start a {kib} container. 
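The curl health check a few steps back can also be reproduced with nothing but the JDK, which is handy where curl is unavailable. A sketch; it assumes `http_ca.crt` was copied into the working directory with `docker cp` and `ELASTIC_PASSWORD` is exported, as in the steps above:

[source,java]
----
import java.io.FileInputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.util.Base64;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public class VerifyElasticsearch {
    public static void main(String[] args) throws Exception {
        // Trust exactly the CA the container generated (copied out via docker cp).
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        Certificate ca;
        try (FileInputStream in = new FileInputStream("http_ca.crt")) {
            ca = cf.generateCertificate(in);
        }
        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
        trustStore.load(null, null);
        trustStore.setCertificateEntry("es-http-ca", ca);

        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);
        SSLContext ssl = SSLContext.getInstance("TLS");
        ssl.init(null, tmf.getTrustManagers(), null);

        String auth = Base64.getEncoder().encodeToString(
            ("elastic:" + System.getenv("ELASTIC_PASSWORD")).getBytes(StandardCharsets.UTF_8));

        HttpClient client = HttpClient.newBuilder().sslContext(ssl).build();
        HttpRequest request = HttpRequest.newBuilder(URI.create("https://localhost:9200"))
            .header("Authorization", "Basic " + auth)
            .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + "\n" + response.body());
    }
}
----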
+ @@ -186,6 +200,7 @@ cosign verify --key cosign.pub {kib-docker-image} ---- docker run --name kib01 --net elastic -p 5601:5601 {kib-docker-image} ---- +// REVIEWED[DEC.10.24] . When {kib} starts, it outputs a unique generated link to the terminal. To access {kib}, open this link in a web browser. @@ -198,6 +213,7 @@ To regenerate the token, run: ---- docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana ---- +// REVIEWED[DEC.10.24] . Log in to {kib} as the `elastic` user with the password that was generated when you started {es}. @@ -208,6 +224,7 @@ To regenerate the password, run: ---- docker exec -it es01 /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic ---- +// REVIEWED[DEC.10.24] [[remove-containers-docker]] ===== Remove containers @@ -226,6 +243,7 @@ docker rm es02 # Remove the {kib} container docker rm kib01 ---- +// REVIEWED[DEC.10.24] ===== Next steps @@ -306,6 +324,7 @@ ES_PORT=127.0.0.1:9200 ---- docker-compose up -d ---- +// REVIEWED[DEC.10.24] . After the cluster has started, open http://localhost:5601 in a web browser to access {kib}. @@ -321,6 +340,7 @@ is preserved and loaded when you restart the cluster with `docker-compose up`. ---- docker-compose down ---- +// REVIEWED[DEC.10.24] To delete the network, containers, and volumes when you stop the cluster, specify the `-v` option: @@ -329,6 +349,7 @@ specify the `-v` option: ---- docker-compose down -v ---- +// REVIEWED[DEC.10.24] ===== Next steps @@ -377,6 +398,7 @@ The `vm.max_map_count` setting must be set within the xhyve virtual machine: -------------------------------------------- screen ~/Library/Containers/com.docker.docker/Data/vms/0/tty -------------------------------------------- +// REVIEWED[DEC.10.24] . Press enter and use `sysctl` to configure `vm.max_map_count`: + @@ -494,6 +516,7 @@ To check the Docker daemon defaults for ulimits, run: -------------------------------------------- docker run --rm {docker-image} /bin/bash -c 'ulimit -Hn && ulimit -Sn && ulimit -Hu && ulimit -Su' -------------------------------------------- +// REVIEWED[DEC.10.24] If needed, adjust them in the Daemon or override them per container. For example, when using `docker run`, set: @@ -502,6 +525,7 @@ For example, when using `docker run`, set: -------------------------------------------- --ulimit nofile=65535:65535 -------------------------------------------- +// REVIEWED[DEC.10.24] ===== Disable swapping @@ -518,6 +542,7 @@ When using `docker run`, you can specify: ---- -e "bootstrap.memory_lock=true" --ulimit memlock=-1:-1 ---- +// REVIEWED[DEC.10.24] ===== Randomize published ports @@ -545,6 +570,7 @@ environment variable. For example, to use 1GB, use the following command. ---- docker run -e ES_JAVA_OPTS="-Xms1g -Xmx1g" -e ENROLLMENT_TOKEN="" --name es01 -p 9200:9200 --net elastic -it {docker-image} ---- +// REVIEWED[DEC.10.24] The `ES_JAVA_OPTS` variable overrides all other JVM options. We do not recommend using `ES_JAVA_OPTS` in production. @@ -616,6 +642,7 @@ If you mount the password file to `/run/secrets/bootstrapPassword.txt`, specify: -------------------------------------------- -e ELASTIC_PASSWORD_FILE=/run/secrets/bootstrapPassword.txt -------------------------------------------- +// REVIEWED[DEC.10.24] You can override the default command for the image to pass {es} configuration parameters as command line options. 
For example: diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9189d2a27f3f3..33addef8aedd0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -184,6 +184,11 @@ + + + + + @@ -4478,11 +4483,11 @@ - - - - - + + + + + diff --git a/test/framework/src/main/java/org/elasticsearch/KnownTransportVersions.java b/libs/core/src/main/java/org/elasticsearch/jdk/RuntimeVersionFeature.java similarity index 55% rename from test/framework/src/main/java/org/elasticsearch/KnownTransportVersions.java rename to libs/core/src/main/java/org/elasticsearch/jdk/RuntimeVersionFeature.java index a9981d6951400..fe6e73271599f 100644 --- a/test/framework/src/main/java/org/elasticsearch/KnownTransportVersions.java +++ b/libs/core/src/main/java/org/elasticsearch/jdk/RuntimeVersionFeature.java @@ -7,16 +7,15 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch; +package org.elasticsearch.jdk; -import java.util.List; +import org.elasticsearch.core.UpdateForV9; -/** - * Provides access to all known transport versions. - */ -public class KnownTransportVersions { - /** - * A sorted list of all known transport versions - */ - public static final List ALL_VERSIONS = List.copyOf(TransportVersions.getAllVersions()); +public class RuntimeVersionFeature { + private RuntimeVersionFeature() {} + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // Remove once we removed all references to SecurityManager in code + public static boolean isSecurityManagerAvailable() { + return Runtime.version().feature() < 24; + } } diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java index 2fccb27fa6e6c..3277cb8f8e6c7 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java @@ -9,6 +9,8 @@ package org.elasticsearch.nativeaccess.jdk; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.nativeaccess.VectorSimilarityFunctions; import org.elasticsearch.nativeaccess.lib.LoaderHelper; import org.elasticsearch.nativeaccess.lib.VectorLibrary; @@ -25,6 +27,8 @@ public final class JdkVectorLibrary implements VectorLibrary { + static final Logger logger = LogManager.getLogger(JdkVectorLibrary.class); + static final MethodHandle dot7u$mh; static final MethodHandle sqr7u$mh; @@ -36,6 +40,7 @@ public final class JdkVectorLibrary implements VectorLibrary { try { int caps = (int) vecCaps$mh.invokeExact(); + logger.info("vec_caps=" + caps); if (caps != 0) { if (caps == 2) { dot7u$mh = downcallHandle( diff --git a/libs/secure-sm/build.gradle b/libs/secure-sm/build.gradle index 473a86215e91e..d93afcf84afed 100644 --- a/libs/secure-sm/build.gradle +++ b/libs/secure-sm/build.gradle @@ -28,4 +28,5 @@ tasks.named('forbiddenApisMain').configure { tasks.named("jarHell").configure { enabled = false } tasks.named("testTestingConventions").configure { baseClass 'junit.framework.TestCase' + baseClass 'org.junit.Assert' } diff --git a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java index 69c6973f57cdf..965696d13613f 100644 --- a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java +++ 
b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java @@ -9,27 +9,43 @@ package org.elasticsearch.secure_sm; -import junit.framework.TestCase; +import com.carrotsearch.randomizedtesting.JUnit3MethodProvider; +import com.carrotsearch.randomizedtesting.RandomizedRunner; +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; + +import org.elasticsearch.jdk.RuntimeVersionFeature; +import org.junit.BeforeClass; +import org.junit.runner.RunWith; import java.security.Permission; import java.security.Policy; import java.security.ProtectionDomain; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; /** Simple tests for SecureSM */ -public class SecureSMTests extends TestCase { - static { +@TestMethodProviders({ JUnit3MethodProvider.class }) +@RunWith(RandomizedRunner.class) +public class SecureSMTests extends org.junit.Assert { + + @BeforeClass + public static void initialize() { + RandomizedTest.assumeFalse( + "SecurityManager has been permanently removed in JDK 24", + RuntimeVersionFeature.isSecurityManagerAvailable() == false + ); // install a mock security policy: // AllPermission to source code // ThreadPermission not granted anywhere else - final ProtectionDomain sourceCode = SecureSM.class.getProtectionDomain(); + final var sourceCode = Set.of(SecureSM.class.getProtectionDomain(), RandomizedRunner.class.getProtectionDomain()); Policy.setPolicy(new Policy() { @Override public boolean implies(ProtectionDomain domain, Permission permission) { - if (domain == sourceCode) { + if (sourceCode.contains(domain)) { return true; } else if (permission instanceof ThreadPermission) { return false; diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java index 342dc14ec2bf3..71aee00405621 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -124,17 +124,13 @@ public AutoDateHistogramAggregationBuilder(String name) { public AutoDateHistogramAggregationBuilder(StreamInput in) throws IOException { super(in); numBuckets = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - minimumIntervalExpression = in.readOptionalString(); - } + minimumIntervalExpression = in.readOptionalString(); } @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeVInt(numBuckets); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - out.writeOptionalString(minimumIntervalExpression); - } + out.writeOptionalString(minimumIntervalExpression); } protected AutoDateHistogramAggregationBuilder( diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java index 7f894d21bab1a..637efb9d91df7 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java +++ 
b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/DerivativePipelineAggregationBuilder.java @@ -259,6 +259,6 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_4_0; + return TransportVersions.ZERO; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactory.java index ba4333bf1be01..bf59fe49d0753 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ASCIIFoldingTokenFilterFactory.java @@ -30,7 +30,7 @@ public class ASCIIFoldingTokenFilterFactory extends AbstractTokenFilterFactory i private final boolean preserveOriginal; public ASCIIFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); preserveOriginal = settings.getAsBoolean(PRESERVE_ORIGINAL.getPreferredName(), DEFAULT_PRESERVE_ORIGINAL); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AbstractCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AbstractCompoundWordTokenFilterFactory.java index 1b68b2a6ebaf9..c0a8c1374c146 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AbstractCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AbstractCompoundWordTokenFilterFactory.java @@ -30,7 +30,7 @@ public abstract class AbstractCompoundWordTokenFilterFactory extends AbstractTok protected final CharArraySet wordList; protected AbstractCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); minWordSize = settings.getAsInt("min_word_size", CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE); minSubwordSize = settings.getAsInt("min_subword_size", CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ApostropheFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ApostropheFilterFactory.java index 97a5de3a1408a..28404ac453439 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ApostropheFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ApostropheFilterFactory.java @@ -21,7 +21,7 @@ public class ApostropheFilterFactory extends AbstractTokenFilterFactory { ApostropheFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java index 4e021f8138e76..34869c709a42f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java @@ -22,7 +22,7 @@ public class ArabicAnalyzerProvider extends 
AbstractIndexAnalyzerProvider asArray = settings.getAsList("ignored_scripts"); Set scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul")); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKWidthFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKWidthFilterFactory.java index 07e28c5a4924f..717bfc1191ab2 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKWidthFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKWidthFilterFactory.java @@ -20,7 +20,7 @@ public final class CJKWidthFilterFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory { CJKWidthFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java index 2be6f220e4441..b134a1d2ab0fb 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java @@ -22,7 +22,7 @@ public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider arrayKeepTypes = settings.getAsList(KEEP_TYPES_KEY, null); if ((arrayKeepTypes == null)) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java index 0fa763d627a7a..e2be82df10600 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java @@ -51,7 +51,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { private static final String ENABLE_POS_INC_KEY = "enable_position_increments"; KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); final List arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null); final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordAnalyzerProvider.java index c43327cf508cc..c4cd39c4f441f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordAnalyzerProvider.java @@ -20,7 +20,7 @@ public class KeywordAnalyzerProvider extends AbstractIndexAnalyzerProvider regexes = settings.getAsList(PATTERNS_KEY, null, false); if (regexes == null) { throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterFactory.java index cd15b05aabfbe..d90b7a182cab3 100644 --- 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternReplaceTokenFilterFactory.java @@ -27,7 +27,7 @@ public class PatternReplaceTokenFilterFactory extends AbstractTokenFilterFactory private final boolean all; public PatternReplaceTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); String sPattern = settings.get("pattern", null); if (sPattern == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java index b8f2e194c2ca0..311d63f4d011a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java @@ -25,7 +25,7 @@ public class PatternTokenizerFactory extends AbstractTokenizerFactory { private final int group; PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/); if (sPattern == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 917a45188123c..b4cb8b8003094 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -35,7 +35,7 @@ public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider filterNames; ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String name, Settings settings, ScriptService scriptService) { - super(name, settings); + super(name); Settings scriptSettings = settings.getAsSettings("script"); Script script = Script.parse(scriptSettings); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SerbianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SerbianAnalyzerProvider.java index 6dc899be95875..e36aaa9756e70 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SerbianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SerbianAnalyzerProvider.java @@ -22,7 +22,7 @@ public class SerbianAnalyzerProvider extends AbstractIndexAnalyzerProvider rules = Analysis.getWordList(env, settings, "rules"); if (rules == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 7548c8ad2b88b..a6e9fccd9d09c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -92,7 +92,7 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final 
DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class); StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { - super(name, settings); + super(name); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StopAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StopAnalyzerProvider.java index 0977d08d0fd48..983a9410fba2d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StopAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StopAnalyzerProvider.java @@ -23,7 +23,7 @@ public class StopAnalyzerProvider extends AbstractIndexAnalyzerProvider DIGIT diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java index 083594f6ab02e..d20a837e92bf6 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java @@ -47,7 +47,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory @SuppressWarnings("HiddenField") public WordDelimiterTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); // Sample Format for the type table: // $ => DIGIT diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/XLowerCaseTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/XLowerCaseTokenizerFactory.java index 7fc1bf6882354..211278d0ece41 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/XLowerCaseTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/XLowerCaseTokenizerFactory.java @@ -20,7 +20,7 @@ public class XLowerCaseTokenizerFactory extends AbstractTokenizerFactory { public XLowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); } @Override diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java index 69dd8e91b52b2..92c134b5c0f56 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java @@ -64,7 +64,6 @@ public void testDictionaryDecompounder() throws Exception { hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe") ); } - assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } public void testHyphenationDecompoundingAnalyzerOnlyLongestMatch() throws Exception { @@ -76,7 +75,6 @@ public void 
testHyphenationDecompoundingAnalyzerOnlyLongestMatch() throws Except hasItems("kaffeemaschine", "kaffee", "fee", "maschine", "fussballpumpe", "fussball", "ballpumpe", "pumpe") ); } - assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } /** @@ -89,7 +87,6 @@ public void testHyphenationDecompoundingAnalyzerNoSubMatches() throws Exception List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoSubMatches", "kaffeemaschine fussballpumpe"); MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "ballpumpe")); } - assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } /** @@ -102,7 +99,6 @@ public void testHyphenationDecompoundingAnalyzerNoOverlappingMatches() throws Ex List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoOverlappingMatches", "kaffeemaschine fussballpumpe"); MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "pumpe")); } - assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } private List analyze(Settings settings, String analyzerName, String text) throws IOException { diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index b017ae9921b0e..8ae56101ef01e 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -42,4 +42,32 @@ if (buildParams.isSnapshotBuild() == false) { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("data_stream/10_basic/Create hidden data stream", "warning does not exist for compatibility") + + // Failure store configuration changed on 8.18 (earlier versions behind feature flag) + task.skipTest("data_stream/10_basic/Create data stream with failure store", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/10_basic/Delete data stream with failure store", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/10_basic/Delete data stream with failure store uninitialized", "Configuring the failure store via data stream templates is not supported anymore.") + + task.skipTest("data_stream/30_auto_create_data_stream/Don't initialize failure store during data stream auto-creation on successful index", "Configuring the failure store via data stream templates is not supported anymore.") + + task.skipTest("data_stream/150_tsdb/TSDB failures go to failure store", "Configuring the failure store via data stream templates is not supported anymore.") + + task.skipTest("data_stream/170_modify_data_stream/Modify a data stream's failure store", "Configuring the failure store via data stream templates is not supported anymore.") + + task.skipTest("data_stream/190_failure_store_redirection/Failure redirects to original failure store during index change if final pipeline changes target", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Ensure failure is redirected to correct failure store after a reroute processor", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Test failure store status with bulk request", "Configuring the failure store via data stream templates is not supported anymore.") + 
task.skipTest("data_stream/190_failure_store_redirection/Redirect ingest failure in data stream to failure store", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Failure redirects to correct failure store when pipeline loop is detected", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Failure redirects to correct failure store when index loop is detected", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Failure redirects to original failure store during index change if self referenced", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Redirect shard failure in data stream to failure store", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/190_failure_store_redirection/Version conflicts are not redirected to failure store", "Configuring the failure store via data stream templates is not supported anymore.") + + task.skipTest("data_stream/200_rollover_failure_store/Lazily roll over a data stream's failure store after an ingest failure", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/200_rollover_failure_store/A failure store marked for lazy rollover should only be rolled over when there is a failure", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/200_rollover_failure_store/Roll over a data stream's failure store without conditions", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/200_rollover_failure_store/Lazily roll over a data stream's failure store after a shard failure", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/200_rollover_failure_store/Don't roll over a data stream's failure store when conditions aren't met", "Configuring the failure store via data stream templates is not supported anymore.") + task.skipTest("data_stream/200_rollover_failure_store/Roll over a data stream's failure store with conditions", "Configuring the failure store via data stream templates is not supported anymore.") }) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 777ddc28fefdc..0e03045a090f8 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -67,6 +67,7 @@ import org.elasticsearch.cluster.metadata.DataStreamAction; import org.elasticsearch.cluster.metadata.DataStreamAlias; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadataStats; import org.elasticsearch.cluster.metadata.IndexWriteLoad; @@ -2447,9 +2448,10 @@ static void putComposableIndexTemplate( .mappings(mappings == null ? 
null : CompressedXContent.fromJSON(mappings)) .aliases(aliases) .lifecycle(lifecycle) + .dataStreamOptions(DataStreamTestHelper.createDataStreamOptionsTemplate(withFailureStore)) ) .metadata(metadata) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, withFailureStore)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() ); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java index 2c9b7417b2832..d27ec04179e1a 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -165,8 +166,8 @@ private void createDataStreamWithFailureStore() throws IOException { request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(List.of(DATA_STREAM_NAME + "*")) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) - .template(new Template(null, new CompressedXContent(""" + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .template(Template.builder().mappings(new CompressedXContent(""" { "dynamic": false, "properties": { @@ -177,7 +178,7 @@ private void createDataStreamWithFailureStore() throws IOException { "type": "long" } } - }"""), null)) + }""")).dataStreamOptions(DataStreamTestHelper.createDataStreamOptionsTemplate(true))) .build() ); assertAcked(safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request))); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java index 96def04069e24..e9eaf7b5faddb 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; @@ -283,8 +284,8 @@ private void putComposableIndexTemplate(boolean failureStore) throws IOException request.indexTemplate( ComposableIndexTemplate.builder() .indexPatterns(List.of(dataStream + "*")) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, 
failureStore)) - .template(new Template(null, new CompressedXContent(""" + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .template(Template.builder().mappings(new CompressedXContent(""" { "dynamic": false, "properties": { @@ -295,7 +296,7 @@ private void putComposableIndexTemplate(boolean failureStore) throws IOException "type": "long" } } - }"""), null)) + }""")).dataStreamOptions(DataStreamTestHelper.createDataStreamOptionsTemplate(failureStore))) .build() ); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 89c440f5edf8b..19067d85a6805 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAction; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -1226,9 +1227,10 @@ static void putComposableIndexTemplate( .settings(settings) .mappings(mappings == null ? null : CompressedXContent.fromJSON(mappings)) .lifecycle(lifecycle) + .dataStreamOptions(DataStreamTestHelper.createDataStreamOptionsTemplate(withFailureStore)) ) .metadata(metadata) - .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, withFailureStore)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build() ); client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java index 980cc32a12c68..de6b7a682324e 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java @@ -11,12 +11,14 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.junit.Before; import java.io.IOException; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -40,10 +42,14 @@ public void setup() throws IOException { "template": { "settings": { "number_of_replicas": 0 + }, + "data_stream_options": { + "failure_store": { + "enabled": true + } } }, "data_stream": { - "failure_store": true } } """); @@ -59,12 +65,63 @@ public void setup() throws IOException { assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + assertThat(((Map) dataStream.get("failure_store")).get("enabled"), 
is(true)); List backingIndices = getIndices(dataStream); assertThat(backingIndices.size(), is(1)); List failureStore = getFailureStore(dataStream); assertThat(failureStore.size(), is(1)); } + public void testExplicitlyResetDataStreamOptions() throws IOException { + Request putComponentTemplateRequest = new Request("POST", "/_component_template/with-options"); + putComponentTemplateRequest.setJsonEntity(""" + { + "template": { + "data_stream_options": { + "failure_store": { + "enabled": true + } + } + } + } + """); + assertOK(client().performRequest(putComponentTemplateRequest)); + + Request invalidRequest = new Request("POST", "/_index_template/other-template"); + invalidRequest.setJsonEntity(""" + { + "index_patterns": ["something-else"], + "composed_of" : ["with-options"], + "template": { + "settings": { + "number_of_replicas": 0 + } + } + } + """); + Exception error = expectThrows(ResponseException.class, () -> client().performRequest(invalidRequest)); + assertThat( + error.getMessage(), + containsString("specifies data stream options that can only be used in combination with a data stream") + ); + + // Check that when we nullify the data stream options we can use any component template in a non-data-stream template + Request otherRequest = new Request("POST", "/_index_template/other-template"); + otherRequest.setJsonEntity(""" + { + "index_patterns": ["something-else"], + "composed_of" : ["with-options"], + "template": { + "settings": { + "number_of_replicas": 0 + }, + "data_stream_options": null + } + } + """); + assertOK(client().performRequest(otherRequest)); + } + public void testEnableDisableFailureStore() throws IOException { { assertAcknowledged(client().performRequest(new Request("DELETE", "/_data_stream/" + DATA_STREAM_NAME + "/_options"))); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index 20ec26c0c9341..85b914be30b2c 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -42,10 +42,14 @@ public void setup() throws IOException { "template": { "settings": { "number_of_replicas": 0 + }, + "data_stream_options": { + "failure_store": { + "enabled": true + } } }, "data_stream": { - "failure_store": true } } """); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 044ea90fec1af..0d19f555d10a4 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -212,8 +212,12 @@ setup: --- "Create data stream with failure store": - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores default settings changed in 8.15+" + test_runner_features: [ capabilities, allowed_warnings ] + reason: "Data stream failure stores config in templates was added in 8.18+" + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: ingest.put_pipeline: @@ -256,8 +260,7 @@ setup: name: my-template4 body: index_patterns: [ failure-data-stream1, failure-data-stream2 ] - 
data_stream: - failure_store: true + data_stream: {} template: settings: index: @@ -269,6 +272,9 @@ setup: type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: @@ -632,8 +638,12 @@ setup: --- "Delete data stream with failure store": - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" + reason: "Data stream failure stores config in templates was added in 8.18+" + test_runner_features: [ allowed_warnings, capabilities ] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: allowed_warnings: @@ -642,8 +652,7 @@ setup: name: my-template4 body: index_patterns: [ failure-data-stream1 ] - data_stream: - failure_store: true + data_stream: {} template: mappings: properties: @@ -651,6 +660,9 @@ setup: type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: @@ -722,8 +734,12 @@ setup: --- "Delete data stream with failure store uninitialized": - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" + reason: "Data stream failure stores config in templates was added in 8.18+" + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: allowed_warnings: @@ -732,8 +748,11 @@ setup: name: my-template4 body: index_patterns: [ failure-data-stream1 ] - data_stream: - failure_store: true + data_stream: {} + template: + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 3fbf85ab1e702..9ea3bfefabdf8 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -185,9 +185,12 @@ index without timestamp: --- TSDB failures go to failure store: - requires: - cluster_features: ["data_stream.failure_store.tsdb_fix"] - reason: "tests tsdb failure store fixes in 8.16.0 that catch timestamp errors that happen earlier in the process and redirect them to the failure store." 
- + reason: "Data stream failure stores config in templates was added in 8.18+" + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: allowed_warnings: - "index template [my-template2] has index patterns [fs-k8s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template2] will take precedence during new index creation" @@ -195,8 +198,7 @@ TSDB failures go to failure store: name: my-template2 body: index_patterns: [ "fs-k8s*" ] - data_stream: - failure_store: true + data_stream: {} template: settings: index: @@ -207,6 +209,9 @@ TSDB failures go to failure store: time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z + data_stream_options: + failure_store: + enabled: true mappings: properties: "@timestamp": diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml index 3c6d29d939226..13f79e95d99f4 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml @@ -92,9 +92,12 @@ --- "Modify a data stream's failure store": - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" - test_runner_features: [ "allowed_warnings" ] + reason: "Data stream failure stores config in templates was added in 8.18+" + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: allowed_warnings: @@ -103,8 +106,7 @@ name: my-template body: index_patterns: [data-*] - data_stream: - failure_store: true + data_stream: {} template: mappings: properties: @@ -112,6 +114,9 @@ type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index 9b5a9dae8bc0a..2f6b7a0bff34b 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -1,3 +1,15 @@ +setup: + - requires: + reason: "Data stream options was added in 8.18+" + test_runner_features: [ capabilities, allowed_warnings, contains ] + capabilities: + - method: POST + path: /{index}/_doc + capabilities: [ 'failure_store_status' ] + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] + --- teardown: - do: @@ -32,13 +44,6 @@ teardown: --- "Redirect ingest failure in data stream to failure store": - - requires: - reason: "Failure store status was added in 8.16+" - test_runner_features: [capabilities, allowed_warnings, contains] - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - do: ingest.put_pipeline: id: "failing_pipeline" @@ -78,14 +83,16 @@ teardown: name: generic_logs_template 
body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "parent_failing_pipeline" + data_stream_options: + failure_store: + enabled: true - do: index: @@ -147,14 +154,6 @@ teardown: --- "Redirect shard failure in data stream to failure store": - - requires: - reason: "Failure store status was added in 8.16+" - test_runner_features: [ capabilities, allowed_warnings, contains ] - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - - do: allowed_warnings: - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" @@ -162,8 +161,7 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 @@ -174,7 +172,9 @@ teardown: type: date count: type: long - + data_stream_options: + failure_store: + enabled: true - do: index: @@ -231,13 +231,6 @@ teardown: --- "Ensure failure is redirected to correct failure store after a reroute processor": - - requires: - test_runner_features: [allowed_warnings, capabilities] - reason: "Failure store status was added in 8.16+" - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - do: ingest.put_pipeline: id: "failing_pipeline" @@ -262,14 +255,16 @@ teardown: name: destination_template body: index_patterns: destination-data-stream - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "failing_pipeline" + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: @@ -331,11 +326,6 @@ teardown: --- "Failure redirects to original failure store during index change if self referenced": - - requires: - cluster_features: [ "gte_v8.15.0" ] - reason: "data stream failure stores REST structure changed in 8.15+" - test_runner_features: [ allowed_warnings, contains ] - - do: ingest.put_pipeline: id: "failing_pipeline" @@ -365,14 +355,16 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "failing_pipeline" + data_stream_options: + failure_store: + enabled: true - do: index: @@ -430,14 +422,6 @@ teardown: --- "Failure redirects to original failure store during index change if final pipeline changes target": - - requires: - reason: "Failure store status was added in 8.16+" - test_runner_features: [ capabilities, allowed_warnings, contains ] - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - - do: ingest.put_pipeline: id: "change_index_pipeline" @@ -462,14 +446,16 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: final_pipeline: "change_index_pipeline" + data_stream_options: + failure_store: + enabled: true - do: index: @@ -526,14 +512,6 @@ teardown: --- "Failure redirects to correct failure store when index loop is detected": - - requires: - reason: "Failure store status was added in 8.16+" - test_runner_features: [ capabilities, 
allowed_warnings, contains ] - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - - do: ingest.put_pipeline: id: "send_to_destination" @@ -575,14 +553,16 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "send_to_destination" + data_stream_options: + failure_store: + enabled: true - do: allowed_warnings: @@ -591,14 +571,16 @@ teardown: name: destination_logs_template body: index_patterns: destination-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "send_back_to_original" + data_stream_options: + failure_store: + enabled: true - do: index: @@ -657,14 +639,6 @@ teardown: --- "Failure redirects to correct failure store when pipeline loop is detected": - - requires: - reason: "Failure store status was added in 8.16+" - test_runner_features: [ capabilities, allowed_warnings, contains ] - capabilities: - - method: POST - path: /{index}/_doc - capabilities: [ 'failure_store_status' ] - - do: ingest.put_pipeline: id: "step_1" @@ -706,14 +680,16 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 index: default_pipeline: "step_1" + data_stream_options: + failure_store: + enabled: true - do: index: @@ -773,9 +749,6 @@ teardown: --- "Version conflicts are not redirected to failure store": - - requires: - test_runner_features: [ allowed_warnings] - - do: allowed_warnings: - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" @@ -783,8 +756,7 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 @@ -795,6 +767,9 @@ teardown: type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: bulk: @@ -812,17 +787,6 @@ teardown: --- "Test failure store status with bulk request": - - requires: - test_runner_features: [ allowed_warnings, capabilities ] - reason: "Failure store status was added in 8.16+" - capabilities: - - method: POST - path: /_bulk - capabilities: [ 'failure_store_status' ] - - method: PUT - path: /_bulk - capabilities: [ 'failure_store_status' ] - - do: allowed_warnings: - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" @@ -830,8 +794,7 @@ teardown: name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 @@ -842,6 +805,9 @@ teardown: type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: allowed_warnings: - "index template [no-fs] has index patterns [no-fs*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [no-fs] will take precedence during new index creation" @@ -849,8 +815,7 @@ teardown: name: no-fs body: index_patterns: no-fs* - data_stream: - failure_store: 
false + data_stream: {} template: settings: number_of_shards: 1 @@ -861,6 +826,9 @@ teardown: type: date count: type: long + data_stream_options: + failure_store: + enabled: false - do: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 0742435f045fb..cc3a11ffde5e8 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -1,9 +1,15 @@ --- setup: - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" - test_runner_features: [allowed_warnings, contains, capabilities] + reason: "Data stream failure stores config in templates was added in 8.18+" + test_runner_features: [ capabilities, allowed_warnings ] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] + - method: POST + path: /{index}/_rollover + capabilities: [ 'lazy-rollover-failure-store' ] - do: allowed_warnings: @@ -12,8 +18,7 @@ setup: name: my-template body: index_patterns: [data-*] - data_stream: - failure_store: true + data_stream: {} template: mappings: properties: @@ -21,6 +26,9 @@ setup: type: date count: type: long + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: @@ -145,14 +153,6 @@ teardown: --- "Lazily roll over a data stream's failure store after a shard failure": - - requires: - reason: "data stream failure store lazy rollover only supported in 8.15+" - test_runner_features: [allowed_warnings, capabilities] - capabilities: - - method: POST - path: /{index}/_rollover - capabilities: [lazy-rollover-failure-store] - # Initialize failure store - do: index: @@ -215,14 +215,6 @@ teardown: --- "Lazily roll over a data stream's failure store after an ingest failure": - - requires: - reason: "data stream failure store lazy rollover only supported in 8.15+" - test_runner_features: [allowed_warnings, capabilities] - capabilities: - - method: POST - path: /{index}/_rollover - capabilities: [lazy-rollover-failure-store] - - do: ingest.put_pipeline: id: "failing_pipeline" @@ -246,12 +238,14 @@ teardown: name: my-template body: index_patterns: [data-*] - data_stream: - failure_store: true + data_stream: {} template: settings: index: default_pipeline: "failing_pipeline" + data_stream_options: + failure_store: + enabled: true - do: indices.create_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml index 61d17c3d675cf..60500767213af 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml @@ -50,9 +50,12 @@ --- "Don't initialize failure store during data stream auto-creation on successful index": - requires: - cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" - test_runner_features: allowed_warnings + reason: "Data stream failure stores config in templates was added in 8.18+" + 
test_runner_features: [allowed_warnings, capabilities] + capabilities: + - method: POST + path: /_index_template/{template} + capabilities: [ 'failure_store_in_template' ] - do: allowed_warnings: @@ -61,12 +64,14 @@ name: generic_logs_template body: index_patterns: logs-* - data_stream: - failure_store: true + data_stream: {} template: settings: number_of_shards: 1 number_of_replicas: 1 + data_stream_options: + failure_store: + enabled: true - do: index: diff --git a/muted-tests.yml b/muted-tests.yml index d356dd2f791d1..3d0a57c8d650a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -282,15 +282,18 @@ tests: - class: org.elasticsearch.packaging.test.ArchiveTests method: test60StartAndStop issue: https://github.com/elastic/elasticsearch/issues/118216 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Bad Data Stream Name} - issue: https://github.com/elastic/elasticsearch/issues/118272 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Unsupported Mode} - issue: https://github.com/elastic/elasticsearch/issues/118273 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Nonexistent Data Stream} - issue: https://github.com/elastic/elasticsearch/issues/118274 +- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests + method: testBottomFieldSort + issue: https://github.com/elastic/elasticsearch/issues/118214 +- class: org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT + method: testTopNThenEnrichRemote + issue: https://github.com/elastic/elasticsearch/issues/118307 +- class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS1UnavailableRemotesIT + method: testEsqlRcs1UnavailableRemoteScenarios + issue: https://github.com/elastic/elasticsearch/issues/118350 +- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests + method: testSearcherId + issue: https://github.com/elastic/elasticsearch/issues/118374 # Examples: # diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuAnalyzerProvider.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuAnalyzerProvider.java index 3fea1918252ea..9fb611345dbea 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuAnalyzerProvider.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuAnalyzerProvider.java @@ -28,7 +28,7 @@ public class IcuAnalyzerProvider extends AbstractIndexAnalyzerProvider private final Normalizer2 normalizer; public IcuAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); String method = settings.get("method", "nfkc_cf"); String mode = settings.get("mode", "compose"); if ("compose".equals(mode) == false && "decompose".equals(mode) == false) { diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java index ae8ead523b7ea..fe0b3a00b2bbb 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuCollationTokenFilterFactory.java @@ -44,7 +44,7 @@ public class 
IcuCollationTokenFilterFactory extends AbstractTokenFilterFactory { @SuppressWarnings("HiddenField") public IcuCollationTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); Collator collator; String rules = settings.get("rules"); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuFoldingTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuFoldingTokenFilterFactory.java index 6cffbb7e0a17e..8932518dc5436 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuFoldingTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuFoldingTokenFilterFactory.java @@ -39,7 +39,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory imp private final Normalizer2 normalizer; public IcuFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(ICU_FOLDING_NORMALIZER, settings); } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuNormalizerTokenFilterFactory.java index 23b2c355b7a68..c9eceef30f62e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuNormalizerTokenFilterFactory.java @@ -30,7 +30,7 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory private final Normalizer2 normalizer; public IcuNormalizerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); String method = settings.get("name", "nfkc_cf"); Normalizer2 normalizerInstance = Normalizer2.getInstance(null, method, Normalizer2.Mode.COMPOSE); this.normalizer = wrapWithUnicodeSetFilter(normalizerInstance, settings); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java index 62ab6d8792905..c66d25ffa2f3b 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTokenizerFactory.java @@ -39,7 +39,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private static final String RULE_FILES = "rule_files"; public IcuTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); config = getIcuConfig(environment, settings); } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTransformTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTransformTokenFilterFactory.java index 785b083c4c31e..5a0a0b3897a47 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTransformTokenFilterFactory.java +++ 
b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/IcuTransformTokenFilterFactory.java @@ -26,7 +26,7 @@ public class IcuTransformTokenFilterFactory extends AbstractTokenFilterFactory i private final Transliterator transliterator; public IcuTransformTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); this.id = settings.get("id", "Null"); String s = settings.get("dir", "forward"); this.dir = "forward".equals(s) ? Transliterator.FORWARD : Transliterator.REVERSE; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/HiraganaUppercaseFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/HiraganaUppercaseFilterFactory.java index b22757af22372..2d761d99e742f 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/HiraganaUppercaseFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/HiraganaUppercaseFilterFactory.java @@ -18,7 +18,7 @@ public class HiraganaUppercaseFilterFactory extends AbstractTokenFilterFactory { public HiraganaUppercaseFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/JapaneseStopTokenFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/JapaneseStopTokenFilterFactory.java index e34d5246dd092..3a26e647092f7 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/JapaneseStopTokenFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/JapaneseStopTokenFilterFactory.java @@ -35,7 +35,7 @@ public class JapaneseStopTokenFilterFactory extends AbstractTokenFilterFactory { private final boolean removeTrailing; public JapaneseStopTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); this.ignoreCase = settings.getAsBoolean("ignore_case", false); this.removeTrailing = settings.getAsBoolean("remove_trailing", true); this.stopWords = Analysis.parseWords( diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KatakanaUppercaseFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KatakanaUppercaseFilterFactory.java index 1f72f1d57d2c5..0776c4ca970a9 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KatakanaUppercaseFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KatakanaUppercaseFilterFactory.java @@ -18,7 +18,7 @@ public class KatakanaUppercaseFilterFactory extends AbstractTokenFilterFactory { public KatakanaUppercaseFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiAnalyzerProvider.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiAnalyzerProvider.java index db336bec997c4..f0667da992be5 100644 --- 
a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiAnalyzerProvider.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiAnalyzerProvider.java @@ -26,7 +26,7 @@ public class KuromojiAnalyzerProvider extends AbstractIndexAnalyzerProvider stopWords = Analysis.parseStopWords(env, settings, JapaneseAnalyzer.getDefaultStopSet()); final JapaneseTokenizer.Mode mode = KuromojiTokenizerFactory.getMode(settings); final UserDictionary userDictionary = KuromojiTokenizerFactory.getUserDictionary(env, settings); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiBaseFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiBaseFormFilterFactory.java index 536e6fe993d06..2e8635704c6b0 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiBaseFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiBaseFormFilterFactory.java @@ -19,7 +19,7 @@ public class KuromojiBaseFormFilterFactory extends AbstractTokenFilterFactory { public KuromojiBaseFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionAnalyzerProvider.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionAnalyzerProvider.java index c4970251d2bb6..f7121fb211acc 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionAnalyzerProvider.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionAnalyzerProvider.java @@ -22,7 +22,7 @@ public class KuromojiCompletionAnalyzerProvider extends AbstractIndexAnalyzerPro private final JapaneseCompletionAnalyzer analyzer; public KuromojiCompletionAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); final UserDictionary userDictionary = KuromojiTokenizerFactory.getUserDictionary(env, settings); final Mode mode = KuromojiCompletionFilterFactory.getMode(settings); analyzer = new JapaneseCompletionAnalyzer(userDictionary, mode); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionFilterFactory.java index 3ec6d08145e69..a7a2d984c3bb7 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiCompletionFilterFactory.java @@ -22,7 +22,7 @@ public class KuromojiCompletionFilterFactory extends AbstractTokenFilterFactory private final Mode mode; public KuromojiCompletionFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); mode = getMode(settings); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiKatakanaStemmerFactory.java 
b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiKatakanaStemmerFactory.java index 7c4d3138381fb..c06f8b7a4941c 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiKatakanaStemmerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiKatakanaStemmerFactory.java @@ -21,7 +21,7 @@ public class KuromojiKatakanaStemmerFactory extends AbstractTokenFilterFactory { private final int minimumLength; public KuromojiKatakanaStemmerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); minimumLength = settings.getAsInt("minimum_length", JapaneseKatakanaStemFilter.DEFAULT_MINIMUM_LENGTH); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiNumberFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiNumberFilterFactory.java index 089b7e10bfae0..605d2c66fac18 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiNumberFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiNumberFilterFactory.java @@ -18,7 +18,7 @@ public class KuromojiNumberFilterFactory extends AbstractTokenFilterFactory { public KuromojiNumberFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiPartOfSpeechFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiPartOfSpeechFilterFactory.java index e8efa781726f8..5ec3023ca4846 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiPartOfSpeechFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiPartOfSpeechFilterFactory.java @@ -27,7 +27,7 @@ public class KuromojiPartOfSpeechFilterFactory extends AbstractTokenFilterFactor private final Set stopTags = new HashSet<>(); public KuromojiPartOfSpeechFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); List wordList = Analysis.getWordList(env, settings, "stoptags"); if (wordList != null) { stopTags.addAll(wordList); diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiReadingFormFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiReadingFormFilterFactory.java index 09ab0bbd4b8d8..8e9f03a0c6261 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiReadingFormFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiReadingFormFilterFactory.java @@ -21,7 +21,7 @@ public class KuromojiReadingFormFilterFactory extends AbstractTokenFilterFactory private final boolean useRomaji; public KuromojiReadingFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); useRomaji = settings.getAsBoolean("use_romaji", false); } diff --git 
a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java index edb29a8f4c98e..aa978e3e73872 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java @@ -44,7 +44,7 @@ public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { private boolean discardCompoundToken; public KuromojiTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); mode = getMode(settings); userDictionary = getUserDictionary(env, settings); discardPunctuation = settings.getAsBoolean("discard_punctuation", true); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java index 180f6aa0a7f96..36094a4f46df1 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java @@ -29,7 +29,7 @@ public class NoriAnalyzerProvider extends AbstractIndexAnalyzerProvider tagList = Analysis.getWordList(env, settings, "stoptags"); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriNumberFilterFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriNumberFilterFactory.java index cbe92156cd765..ac64c5ff26428 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriNumberFilterFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriNumberFilterFactory.java @@ -19,7 +19,7 @@ public class NoriNumberFilterFactory extends AbstractTokenFilterFactory { public NoriNumberFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriPartOfSpeechStopFilterFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriPartOfSpeechStopFilterFactory.java index dddb485ab0df3..f93a1bd6e9094 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriPartOfSpeechStopFilterFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriPartOfSpeechStopFilterFactory.java @@ -26,7 +26,7 @@ public class NoriPartOfSpeechStopFilterFactory extends AbstractTokenFilterFactor private final Set stopTags; public NoriPartOfSpeechStopFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); List tagList = Analysis.getWordList(env, settings, "stoptags"); this.stopTags = tagList != null ? 
resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS; } diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriReadingFormFilterFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriReadingFormFilterFactory.java index 1e1b211fdea09..ad5a4a6d1b21a 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriReadingFormFilterFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriReadingFormFilterFactory.java @@ -18,7 +18,7 @@ public class NoriReadingFormFilterFactory extends AbstractTokenFilterFactory { public NoriReadingFormFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java index ed8458bc94043..40e159b343850 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java @@ -38,7 +38,7 @@ public class NoriTokenizerFactory extends AbstractTokenizerFactory { private final boolean discardPunctuation; public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); decompoundMode = getMode(settings); userDictionary = getUserDictionary(env, settings, indexSettings); discardPunctuation = settings.getAsBoolean("discard_punctuation", true); diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/phonetic/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/phonetic/PhoneticTokenFilterFactory.java index 786c6230349a4..60986fe58db6a 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/phonetic/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/phonetic/PhoneticTokenFilterFactory.java @@ -46,7 +46,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { private boolean isDaitchMokotoff; public PhoneticTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); this.languageset = null; this.nametype = null; this.ruletype = null; diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseAnalyzerProvider.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseAnalyzerProvider.java index 668b94d0ca972..9dceac60b3c31 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseAnalyzerProvider.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseAnalyzerProvider.java @@ -20,7 +20,7 @@ public class SmartChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider< private final SmartChineseAnalyzer analyzer; public SmartChineseAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); analyzer = new 
SmartChineseAnalyzer(SmartChineseAnalyzer.getDefaultStopSet()); } diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseNoOpTokenFilterFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseNoOpTokenFilterFactory.java index 55cda67852272..41869b1a7222e 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseNoOpTokenFilterFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseNoOpTokenFilterFactory.java @@ -18,7 +18,7 @@ public class SmartChineseNoOpTokenFilterFactory extends AbstractTokenFilterFactory { public SmartChineseNoOpTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(name, settings); + super(name); } @Override diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseStopTokenFilterFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseStopTokenFilterFactory.java index 2463ad4a2c188..5688971d02865 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseStopTokenFilterFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseStopTokenFilterFactory.java @@ -35,7 +35,7 @@ public class SmartChineseStopTokenFilterFactory extends AbstractTokenFilterFacto private final boolean removeTrailing; public SmartChineseStopTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(name, settings); + super(name); this.ignoreCase = settings.getAsBoolean("ignore_case", false); this.removeTrailing = settings.getAsBoolean("remove_trailing", true); this.stopWords = Analysis.parseWords( diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseTokenizerTokenizerFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseTokenizerTokenizerFactory.java index 2545c9c7d94e8..2b0a9bfe00341 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/SmartChineseTokenizerTokenizerFactory.java @@ -19,7 +19,7 @@ public class SmartChineseTokenizerTokenizerFactory extends AbstractTokenizerFactory { public SmartChineseTokenizerTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, settings, name); + super(name); } @Override diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishAnalyzerProvider.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishAnalyzerProvider.java index 68e7298473cd4..73f42930e8613 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishAnalyzerProvider.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/index/analysis/pl/PolishAnalyzerProvider.java @@ -20,7 +20,7 @@ public class PolishAnalyzerProvider extends AbstractIndexAnalyzerProvider> getTokenFilters() { - return singletonMap( - "test_token_filter", - (indexSettings, environment, name, settings) -> new AbstractTokenFilterFactory(name, settings) { - @Override - 
public TokenStream create(TokenStream tokenStream) { - if (throwParsingError.get()) { - throw new MapperParsingException("simulate mapping parsing error"); - } - return tokenStream; + return singletonMap("test_token_filter", (indexSettings, environment, name, settings) -> new AbstractTokenFilterFactory(name) { + @Override + public TokenStream create(TokenStream tokenStream) { + if (throwParsingError.get()) { + throw new MapperParsingException("simulate mapping parsing error"); } + return tokenStream; } - ); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 36580ebda8aee..fc105d3d4fcd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3703,8 +3703,9 @@ public List getPreConfiguredTokenFilters() { @Override public Map>> getAnalyzers() { - return singletonMap("mock_whitespace", (indexSettings, environment, name, settings) -> { - return new AbstractIndexAnalyzerProvider(name, settings) { + return singletonMap( + "mock_whitespace", + (indexSettings, environment, name, settings) -> new AbstractIndexAnalyzerProvider(name) { MockAnalyzer instance = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false); @@ -3712,8 +3713,8 @@ public Map getAllVersions() { + return VersionsHolder.ALL_VERSIONS; } public static TransportVersion fromString(String str) { @@ -139,16 +159,25 @@ public String toString() { return Integer.toString(id); } - private static class CurrentHolder { - private static final TransportVersion CURRENT = findCurrent(); + private static class VersionsHolder { + private static final List ALL_VERSIONS; + private static final Map ALL_VERSIONS_MAP; + private static final TransportVersion CURRENT; + + static { + Collection extendedVersions = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) + .map(VersionExtension::getTransportVersions) + .orElse(Collections.emptyList()); + + if (extendedVersions.isEmpty()) { + ALL_VERSIONS = TransportVersions.DEFINED_VERSIONS; + } else { + ALL_VERSIONS = Stream.concat(TransportVersions.DEFINED_VERSIONS.stream(), extendedVersions.stream()).sorted().toList(); + } + + ALL_VERSIONS_MAP = ALL_VERSIONS.stream().collect(Collectors.toUnmodifiableMap(TransportVersion::id, Function.identity())); - // finds the pluggable current version - private static TransportVersion findCurrent() { - var version = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) - .map(e -> e.getCurrentTransportVersion(TransportVersions.LATEST_DEFINED)) - .orElse(TransportVersions.LATEST_DEFINED); - assert version.onOrAfter(TransportVersions.LATEST_DEFINED); - return version; + CURRENT = ALL_VERSIONS.getLast(); } } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 814bcc0fa0a73..9f50df02b4fe8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -13,13 +13,12 @@ import org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; -import java.util.Collection; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import 
java.util.List; import java.util.Map; -import java.util.NavigableMap; import java.util.Set; -import java.util.TreeMap; import java.util.TreeSet; import java.util.function.IntFunction; @@ -140,7 +139,8 @@ static TransportVersion def(int id) { public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0); public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_00_0); public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS = def(8_804_00_0); - public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_805_00_0); + public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); + public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); /* * STOP! READ THIS FIRST! No, really, @@ -209,21 +209,24 @@ static TransportVersion def(int id) { */ public static final TransportVersion MINIMUM_CCS_VERSION = V_8_15_0; - static final NavigableMap VERSION_IDS = getAllVersionIds(TransportVersions.class); + /** + * Sorted list of all versions defined in this class + */ + static final List DEFINED_VERSIONS = collectAllVersionIdsDefinedInClass(TransportVersions.class); - // the highest transport version constant defined in this file, used as a fallback for TransportVersion.current() + // the highest transport version constant defined static final TransportVersion LATEST_DEFINED; static { - LATEST_DEFINED = VERSION_IDS.lastEntry().getValue(); + LATEST_DEFINED = DEFINED_VERSIONS.getLast(); // see comment on IDS field // now we're registered all the transport versions, we can clear the map IDS = null; } - public static NavigableMap getAllVersionIds(Class cls) { + public static List collectAllVersionIdsDefinedInClass(Class cls) { Map versionIdFields = new HashMap<>(); - NavigableMap builder = new TreeMap<>(); + List definedTransportVersions = new ArrayList<>(); Set ignore = Set.of("ZERO", "CURRENT", "MINIMUM_COMPATIBLE", "MINIMUM_CCS_VERSION"); @@ -240,7 +243,7 @@ public static NavigableMap getAllVersionIds(Class } catch (IllegalAccessException e) { throw new AssertionError(e); } - builder.put(version.id(), version); + definedTransportVersions.add(version); if (Assertions.ENABLED) { // check the version number is unique @@ -257,11 +260,9 @@ public static NavigableMap getAllVersionIds(Class } } - return Collections.unmodifiableNavigableMap(builder); - } + Collections.sort(definedTransportVersions); - static Collection getAllVersions() { - return VERSION_IDS.values(); + return List.copyOf(definedTransportVersions); } static final IntFunction VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class, LATEST_DEFINED.id()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 9ffef1f178f44..b855f2cee7613 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -84,7 +85,7 @@ protected void masterOperation( String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map<String, AliasFilter> indicesAndFilters = new HashMap<>(); - Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); final String[] aliases = indexNameExpressionResolver.indexAliases( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 5c5c71bc002b3..f5c100b7884bb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -565,8 +566,8 @@ static void resolveIndices( if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) { names = new String[] { "**" }; } - Set<String> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); - for (String s : resolvedIndexAbstractions) { + Set<ResolvedExpression> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); + for (ResolvedExpression s : resolvedIndexAbstractions) { enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams); } indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName)); @@ -597,12 +598,12 @@ private static void mergeResults( private static void enrichIndexAbstraction( ClusterState clusterState, - String indexAbstraction, + ResolvedExpression indexAbstraction, List<ResolvedIndex> indices, List<ResolvedAlias> aliases, List<ResolvedDataStream> dataStreams ) { - IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction); + IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource()); if (ia != null) { switch (ia.getType()) { case CONCRETE_INDEX -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 80e6fbfe051a4..a521dac60e96a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import 
org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.ResettableValue; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -111,9 +112,9 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - if (this.resolvedTemplate != null) { + if (resolvedTemplate != null) { builder.field(TEMPLATE.getPreferredName()); - this.resolvedTemplate.toXContent(builder, params, rolloverConfiguration); + resolvedTemplate.toXContent(builder, ResettableValue.hideResetValues(params), rolloverConfiguration); } if (this.overlappingTemplates != null) { builder.startArray(OVERLAPPING.getPreferredName()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 94d9b87467ea8..5f98852148ed4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -61,6 +61,7 @@ import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV1Templates; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findConflictingV2Templates; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.findV2Template; +import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.resolveDataStreamOptions; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.resolveLifecycle; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.resolveSettings; @@ -348,7 +349,13 @@ public static Template resolveTemplate( if (template.getDataStreamTemplate() != null && lifecycle == null && isDslOnlyMode) { lifecycle = DataStreamLifecycle.DEFAULT; } - return new Template(settings, mergedMapping, aliasesByName, lifecycle); + return new Template( + settings, + mergedMapping, + aliasesByName, + lifecycle, + resolveDataStreamOptions(simulatedState.metadata(), matchingTemplate) + ); } private static IndexLongFieldRange getEventIngestedRange(String indexName, ClusterState simulatedState) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 4e9830fe0d14e..e01f364712676 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ 
-133,7 +134,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener @Override protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) { final ClusterState clusterState = clusterService.state(); - final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index cef68324e2a45..e2c73349b93ec 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamOptions; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; @@ -42,6 +43,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.core.Nullable; import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; @@ -638,12 +640,16 @@ private static Boolean resolveFailureStoreFromMetadata(String indexName, Metadat } /** - * Determines if an index name is associated with an index template that has a data stream failure store enabled. + * Determines if an index name is associated with an index template that has a data stream failure store enabled. Since the failure + * store is a data stream feature, the method returns true/false only for data stream templates; otherwise it returns null. * @param indexName The index name to check. * @param metadata Cluster state metadata. - * @return true if the given index name corresponds to an index template with a data stream failure store enabled. + * @return true if the associated index template has the failure store enabled, false if the failure store is disabled or not + * specified, and null if the template is not a data stream template. 
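+ * For illustration, a sketch of the three possible outcomes (template and index names here are hypothetical):
+ * <pre>{@code
+ * resolveFailureStoreFromTemplate("logs-app", metadata);    // data stream template, failure store enabled -> true
+ * resolveFailureStoreFromTemplate("metrics-app", metadata); // data stream template, disabled/unspecified  -> false
+ * resolveFailureStoreFromTemplate("plain-index", metadata); // not a data stream template                  -> null
+ * }</pre>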
+ * Visible for testing */ - private static Boolean resolveFailureStoreFromTemplate(String indexName, Metadata metadata) { + @Nullable + static Boolean resolveFailureStoreFromTemplate(String indexName, Metadata metadata) { if (indexName == null) { return null; } @@ -656,7 +662,11 @@ private static Boolean resolveFailureStoreFromTemplat ComposableIndexTemplate composableIndexTemplate = metadata.templatesV2().get(template); if (composableIndexTemplate.getDataStreamTemplate() != null) { // Check if the data stream has the failure store enabled - return composableIndexTemplate.getDataStreamTemplate().hasFailureStore(); + DataStreamOptions dataStreamOptions = MetadataIndexTemplateService.resolveDataStreamOptions( + composableIndexTemplate, + metadata.componentTemplates() + ).mapAndGet(DataStreamOptions.Template::toDataStreamOptions); + return dataStreamOptions != null && dataStreamOptions.isFailureStoreEnabled(); } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 9c82d032014f2..84c6df7b8a66f 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -109,7 +110,7 @@ protected boolean resolveIndex(ExplainRequest request) { @Override protected void resolveRequest(ClusterState state, InternalRequest request) { - final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); + final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases); request.request().filteringAlias(aliasFilter); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 5d1fb46a53cef..ae27406bf396d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -111,6 +112,7 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.stream.Collectors; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static 
org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -207,7 +209,7 @@ public TransportSearchAction( private Map<String, OriginalIndices> buildPerIndexOriginalIndices( ClusterState clusterState, - Set<String> indicesAndAliases, + Set<ResolvedExpression> indicesAndAliases, String[] indices, IndicesOptions indicesOptions ) { @@ -215,6 +217,9 @@ private Map buildPerIndexOriginalIndices( var blocks = clusterState.blocks(); // optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false; + // Get the distinct set of index abstraction names from the resolved expressions, to help with the reverse resolution from a + // concrete index to the expression that produced it. + Set<String> indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); for (String index : indices) { if (hasBlocks) { blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index); @@ -231,8 +236,8 @@ private Map buildPerIndexOriginalIndices( String[] finalIndices = Strings.EMPTY_ARRAY; if (aliases == null || aliases.length == 0 - || indicesAndAliases.contains(index) - || hasDataStreamRef(clusterState, indicesAndAliases, index)) { + || indicesAndAliasesResources.contains(index) + || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) { finalIndices = new String[] { index }; } if (aliases != null) { @@ -251,7 +256,11 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set i return indicesAndAliases.contains(ret.getParentDataStream().getName()); } - Map<String, AliasFilter> buildIndexAliasFilters(ClusterState clusterState, Set<String> indicesAndAliases, Index[] concreteIndices) { + Map<String, AliasFilter> buildIndexAliasFilters( + ClusterState clusterState, + Set<ResolvedExpression> indicesAndAliases, + Index[] concreteIndices + ) { final Map<String, AliasFilter> aliasFilterMap = new HashMap<>(); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); @@ -1236,7 +1245,10 @@ private void executeSearch( } else { final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); - final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); aliasFilter.putAll(remoteAliasMap); localShardIterators = getLocalShardsIterator( @@ -1835,7 +1847,7 @@ List getLocalShardsIterator( ClusterState clusterState, SearchRequest searchRequest, String clusterAlias, - Set<String> indicesAndAliases, + Set<ResolvedExpression> indicesAndAliases, String[] concreteIndices ) { var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index d8b57972d604f..614a3e9cf22ae 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; @@ -127,7 +128,10 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null), listener.delegateFailureAndWrap((delegate, searchRequest) -> { Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); - final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); final Map<String, AliasFilter> aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, indicesAndAliases, diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 6a881163914e4..7b2f0c2c894be 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.nativeaccess.NativeAccess; @@ -722,6 +723,9 @@ public final BootstrapCheckResult check(BootstrapContext context) { } boolean isAllPermissionGranted() { + if (RuntimeVersionFeature.isSecurityManagerAvailable() == false) { + return false; + } final SecurityManager sm = System.getSecurityManager(); assert sm != null; try { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 27cbb39c05d38..04f93494a50f1 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -35,6 +35,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.OsProbe; @@ -113,12 +114,14 @@ private static Bootstrap initPhase1() { * the presence of a security manager or lack thereof act as if there is a security manager present (e.g., DNS cache policy). * This forces such policies to take effect immediately. 
*/ - org.elasticsearch.bootstrap.Security.setSecurityManager(new SecurityManager() { - @Override - public void checkPermission(Permission perm) { - // grant all permissions so that we can later set the security manager to the one that we want - } - }); + if (RuntimeVersionFeature.isSecurityManagerAvailable()) { + org.elasticsearch.bootstrap.Security.setSecurityManager(new SecurityManager() { + @Override + public void checkPermission(Permission perm) { + // grant all permissions so that we can later set the security manager to the one that we want + } + }); + } LogConfigurator.registerErrorListener(); BootstrapInfo.init(); @@ -215,7 +218,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { .toList(); EntitlementBootstrap.bootstrap(pluginData, pluginsResolver::resolveClassToPluginName); - } else { + } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { // install SM after natives, shutdown hooks, etc. LogManager.getLogger(Elasticsearch.class).info("Bootstrapping java SecurityManager"); org.elasticsearch.bootstrap.Security.configure( @@ -223,6 +226,8 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()), args.pidFile() ); + } else { + LogManager.getLogger(Elasticsearch.class).warn("Bootstrapping without any protection"); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java index 06ec8abf60ff4..172fa34e14ecb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RepositoryCleanupInProgress.java @@ -93,7 +93,7 @@ public String toString() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_4_0; + return TransportVersions.ZERO; } public record Entry(String repository, long repositoryStateId) implements Writeable, RepositoryOperation { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index ae7cff6312155..998217e93c426 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -372,16 +373,14 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { private static final ParseField HIDDEN = new ParseField("hidden"); private static final ParseField ALLOW_CUSTOM_ROUTING = new ParseField("allow_custom_routing"); + // Remove this after this PR gets backported + @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) private static final ParseField FAILURE_STORE = new ParseField("failure_store"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "data_stream_template", false, - args -> new DataStreamTemplate( - args[0] != null && (boolean) args[0], - args[1] != null && (boolean) args[1], - 
DataStream.isFailureStoreFeatureFlagEnabled() && args[2] != null && (boolean) args[2] - ) + args -> new DataStreamTemplate(args[0] != null && (boolean) args[0], args[1] != null && (boolean) args[1]) ); static { @@ -394,20 +393,14 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { private final boolean hidden; private final boolean allowCustomRouting; - private final boolean failureStore; public DataStreamTemplate() { - this(false, false, false); + this(false, false); } public DataStreamTemplate(boolean hidden, boolean allowCustomRouting) { - this(hidden, allowCustomRouting, false); - } - - public DataStreamTemplate(boolean hidden, boolean allowCustomRouting, boolean failureStore) { this.hidden = hidden; this.allowCustomRouting = allowCustomRouting; - this.failureStore = failureStore; } DataStreamTemplate(StreamInput in) throws IOException { @@ -425,10 +418,9 @@ public DataStreamTemplate(boolean hidden, boolean allowCustomRouting, boolean fa boolean value = in.readBoolean(); assert value == false : "expected false, because this used to be an optional enum that never got set"; } - if (in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { - failureStore = in.readBoolean(); - } else { - failureStore = false; + if (in.getTransportVersion() + .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, TransportVersions.ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES)) { + in.readBoolean(); } } @@ -458,10 +450,6 @@ public boolean isAllowCustomRouting() { return allowCustomRouting; } - public boolean hasFailureStore() { - return failureStore; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(hidden); @@ -472,8 +460,11 @@ public void writeTo(StreamOutput out) throws IOException { // See comment in constructor. out.writeBoolean(false); } - if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { - out.writeBoolean(failureStore); + if (out.getTransportVersion() + .between(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION, TransportVersions.ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES)) { + // Previous versions expect the failure store to be configured via the DataStreamTemplate. We add it here, so we don't break + // the serialisation, but we do not care to preserve the value because this feature is still behind a feature flag. 
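+ // (Assuming the usual semantics of between, i.e. lower bound inclusive and upper bound exclusive, this writes the legacy
+ // boolean exactly for the versions that still expect it on the wire.)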
+ out.writeBoolean(false); } } @@ -482,9 +473,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("hidden", hidden); builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), allowCustomRouting); - if (DataStream.isFailureStoreFeatureFlagEnabled()) { - builder.field(FAILURE_STORE.getPreferredName(), failureStore); - } builder.endObject(); return builder; } @@ -494,12 +482,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DataStreamTemplate that = (DataStreamTemplate) o; - return hidden == that.hidden && allowCustomRouting == that.allowCustomRouting && failureStore == that.failureStore; + return hidden == that.hidden && allowCustomRouting == that.allowCustomRouting; } @Override public int hashCode() { - return Objects.hash(hidden, allowCustomRouting, failureStore); + return Objects.hash(hidden, allowCustomRouting); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 979434950cf7a..1c6206a4815eb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -424,7 +424,7 @@ public boolean isAllowCustomRouting() { * @return true, if the user has explicitly enabled the failure store. */ public boolean isFailureStoreEnabled() { - return dataStreamOptions.failureStore() != null && dataStreamOptions.failureStore().isExplicitlyEnabled(); + return dataStreamOptions.isFailureStoreEnabled(); } @Nullable @@ -1188,6 +1188,7 @@ public void writeTo(StreamOutput out) throws IOException { ); // The fields behind the feature flag should always be last. 
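// (ConstructingObjectParser assigns args[] positions in declaration order, so keeping the feature-flagged fields last
// leaves the argument indices of the always-present fields stable whether or not the flag is enabled.)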
if (DataStream.isFailureStoreFeatureFlagEnabled()) { + // Should be removed after backport PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FAILURE_STORE_FIELD); PARSER.declareObjectArray( ConstructingObjectParser.optionalConstructorArg(), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java index e9d32594fa833..5a6217eea8f7b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java @@ -14,7 +14,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -27,11 +29,13 @@ * supports the following configurations only explicitly enabling or disabling the failure store */ public record DataStreamFailureStore(Boolean enabled) implements SimpleDiffable, ToXContentObject { + public static final String FAILURE_STORE = "failure_store"; + public static final String ENABLED = "enabled"; - public static final ParseField ENABLED_FIELD = new ParseField("enabled"); + public static final ParseField ENABLED_FIELD = new ParseField(ENABLED); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "failure_store", + FAILURE_STORE, false, (args, unused) -> new DataStreamFailureStore((Boolean) args[0]) ); @@ -59,13 +63,6 @@ public static Diff readDiffFrom(StreamInput in) throws I return SimpleDiffable.readDiffFrom(DataStreamFailureStore::new, in); } - /** - * @return iff the user has explicitly enabled the failure store - */ - public boolean isExplicitlyEnabled() { - return enabled != null && enabled; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(enabled); @@ -89,4 +86,80 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static DataStreamFailureStore fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } + + /** + * This class is only used in template configuration. It wraps the fields of {@link DataStreamFailureStore} with {@link ResettableValue} + * to allow a user to signal when they want to reset any previously encountered values during template composition. Furthermore, it + * provides the method {@link #merge(Template, Template)} that dictates how two templates can be composed. + */ + public record Template(ResettableValue enabled) implements Writeable, ToXContentObject { + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "failure_store_template", + false, + (args, unused) -> new Template(args[0] == null ? ResettableValue.undefined() : (ResettableValue) args[0]) + ); + + static { + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL + ? 
ResettableValue.reset() + : ResettableValue.create(p.booleanValue()), + ENABLED_FIELD, + ObjectParser.ValueType.BOOLEAN_OR_NULL + ); + } + + public Template { + if (enabled.get() == null) { + throw new IllegalArgumentException("Failure store configuration should have at least one non-null configuration value."); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + ResettableValue.write(out, enabled, StreamOutput::writeBoolean); + } + + public static Template read(StreamInput in) throws IOException { + ResettableValue enabled = ResettableValue.read(in, StreamInput::readBoolean); + return new Template(enabled); + } + + /** + * Converts the template to XContent, depending on the XContent.Params set by {@link ResettableValue#hideResetValues(Params)} + * it may or may not display any explicit nulls when the value is to be reset. + */ + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + enabled.toXContent(builder, params, ENABLED_FIELD.getPreferredName()); + builder.endObject(); + return builder; + } + + public static Template fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + /** + * Returns a template which has the value of the initial template updated with the values of the update. + * Note: for now it's a trivial composition because we have only one non-null field. + * @return the composed template + */ + public static Template merge(Template ignored, Template update) { + return update; + } + + public DataStreamFailureStore toFailureStore() { + return new DataStreamFailureStore(enabled.get()); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java index 9cd4e2625e2ba..51e13c05e6892 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -24,6 +24,8 @@ import java.io.IOException; +import static org.elasticsearch.cluster.metadata.DataStreamFailureStore.FAILURE_STORE; + /** * Holds data stream dedicated configuration options such as failure store, (in the future lifecycle). 
Currently, it * supports the following configurations: @@ -34,10 +36,10 @@ public record DataStreamOptions(@Nullable DataStreamFailureStore failureStore) SimpleDiffable, ToXContentObject { - public static final ParseField FAILURE_STORE_FIELD = new ParseField("failure_store"); + public static final ParseField FAILURE_STORE_FIELD = new ParseField(FAILURE_STORE); public static final DataStreamOptions FAILURE_STORE_ENABLED = new DataStreamOptions(new DataStreamFailureStore(true)); public static final DataStreamOptions FAILURE_STORE_DISABLED = new DataStreamOptions(new DataStreamFailureStore(false)); - public static final DataStreamOptions EMPTY = new DataStreamOptions(); + public static final DataStreamOptions EMPTY = new DataStreamOptions(null); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "options", @@ -46,18 +48,13 @@ public record DataStreamOptions(@Nullable DataStreamFailureStore failureStore) ); static { - PARSER.declareField( + PARSER.declareObject( ConstructingObjectParser.optionalConstructorArg(), (p, c) -> DataStreamFailureStore.fromXContent(p), - FAILURE_STORE_FIELD, - ObjectParser.ValueType.OBJECT_OR_NULL + FAILURE_STORE_FIELD ); } - public DataStreamOptions() { - this(null); - } - public static DataStreamOptions read(StreamInput in) throws IOException { return new DataStreamOptions(in.readOptionalWriteable(DataStreamFailureStore::new)); } @@ -66,8 +63,21 @@ public static Diff readDiffFrom(StreamInput in) throws IOExce return SimpleDiffable.readDiffFrom(DataStreamOptions::read, in); } + /** + * @return true if none of the options are defined + */ public boolean isEmpty() { - return this.equals(EMPTY); + return failureStore == null; + } + + /** + * Determines if this data stream has its failure store enabled or not. Currently, the failure store + * is enabled only when a user has explicitly requested it. + * + * @return true, if the user has explicitly enabled the failure store. + */ + public boolean isFailureStoreEnabled() { + return failureStore != null && Boolean.TRUE.equals(failureStore.enabled()); } @Override @@ -93,4 +103,100 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static DataStreamOptions fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } + + /** + * This class is only used in template configuration. It wraps the fields of {@link DataStreamOptions} with {@link ResettableValue} + * to allow a user to signal when they want to reset any previously encountered values during template composition. Furthermore, it + * provides the {@link Template.Builder} that dictates how two templates can be composed. + */ + public record Template(ResettableValue failureStore) implements Writeable, ToXContentObject { + public static final Template EMPTY = new Template(ResettableValue.undefined()); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "data_stream_options_template", + false, + (args, unused) -> new Template( + args[0] == null ? 
ResettableValue.undefined() : (ResettableValue<DataStreamFailureStore.Template>) args[0] + ) + ); + + static { + PARSER.declareObjectOrNull( + ConstructingObjectParser.optionalConstructorArg(), + (p, s) -> ResettableValue.create(DataStreamFailureStore.Template.fromXContent(p)), + ResettableValue.reset(), + FAILURE_STORE_FIELD + ); + } + + public Template { + assert failureStore != null : "Template does not accept null values, please use ResettableValue.undefined()"; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + ResettableValue.write(out, failureStore, (o, v) -> v.writeTo(o)); + } + + public static Template read(StreamInput in) throws IOException { + ResettableValue<DataStreamFailureStore.Template> failureStore = ResettableValue.read(in, DataStreamFailureStore.Template::read); + return new Template(failureStore); + } + + public static Template fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + /** + * Converts the template to XContent; depending on the {@code params} set by {@link ResettableValue#hideResetValues(Params)}, + * it may or may not display any explicit nulls when the value is to be reset. + */ + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + failureStore.toXContent(builder, params, FAILURE_STORE_FIELD.getPreferredName()); + builder.endObject(); + return builder; + } + + public DataStreamOptions toDataStreamOptions() { + return new DataStreamOptions(failureStore.mapAndGet(DataStreamFailureStore.Template::toFailureStore)); + } + + public static Builder builder(Template template) { + return new Builder(template); + } + + /** + * Builds and composes a data stream options template. + */ + public static class Builder { + private ResettableValue<DataStreamFailureStore.Template> failureStore = ResettableValue.undefined(); + + public Builder(Template template) { + if (template != null) { + failureStore = template.failureStore(); + } + } + + /** + * Updates the current failure store configuration with the provided value. This is not necessarily a replacement; if both + * instances contain data, the configurations are merged. + */ + public Builder updateFailureStore(ResettableValue<DataStreamFailureStore.Template> newFailureStore) { + failureStore = ResettableValue.merge(failureStore, newFailureStore, DataStreamFailureStore.Template::merge); + return this; + } + + public Template build() { + return new Template(failureStore); + } + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 279243eeff7cf..e7914d812e05c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -80,6 +80,13 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null"); } + /** + * This represents a resolved expression in the form of the name of a resource in the cluster. + * Soon it will facilitate an index component selector, which will define which part of the resource the expression is targeting. + * @param resource the name of a resource that an expression refers to. 
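+ *                 For illustration: {@code new ResolvedExpression("logs-web").resource()} evaluates to {@code "logs-web"}.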
+ */ + public record ResolvedExpression(String resource) {} + /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. @@ -197,8 +204,9 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressionsToResources(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() + .map(ResolvedExpression::resource) .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) .filter(ia -> ia.getType() == Type.DATA_STREAM) @@ -227,10 +235,11 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressionsToResources(context, request.index()); + final Collection expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { - IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); + ResolvedExpression resolvedExpression = expressions.iterator().next(); + IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); if (ia.getType() == Type.ALIAS) { Index writeIndex = ia.getWriteIndex(); if (writeIndex == null) { @@ -257,7 +266,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. */ - protected static Collection resolveExpressionsToResources(Context context, String... expressions) { + protected static Collection resolveExpressionsToResources(Context context, String... expressions) { // If we do not expand wildcards, then empty or _all expression result in an empty list boolean expandWildcards = context.getOptions().expandWildcardExpressions(); if (expandWildcards == false) { @@ -275,7 +284,7 @@ protected static Collection resolveExpressionsToResources(Context contex } // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. - Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) ? 
new LinkedHashSet<>() : new ArrayList<>(expressions.length); boolean wildcardSeen = false; @@ -297,7 +306,7 @@ protected static Collection resolveExpressionsToResources(Context contex wildcardSeen |= isWildcard; if (isWildcard) { - Set<String> matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + Set<ResolvedExpression> matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { throw notFoundException(baseExpression); @@ -310,9 +319,9 @@ protected static Collection resolveExpressionsToResources(Context contex } } else { if (isExclusion) { - resources.remove(baseExpression); + resources.remove(new ResolvedExpression(baseExpression)); } else if (ensureAliasOrIndexExists(context, baseExpression)) { - resources.add(baseExpression); + resources.add(new ResolvedExpression(baseExpression)); } } } @@ -428,12 +437,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection<String> expressions = resolveExpressionsToResources(context, indexExpressions); + final Collection<ResolvedExpression> expressions = resolveExpressionsToResources(context, indexExpressions); final Set<Index> concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map<String, IndexAbstraction> indicesLookup = context.getState().metadata().getIndicesLookup(); - for (String expression : expressions) { - final IndexAbstraction indexAbstraction = indicesLookup.get(expression); + for (ResolvedExpression expression : expressions) { + final IndexAbstraction indexAbstraction = indicesLookup.get(expression.resource()); assert indexAbstraction != null; if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { Index writeIndex = indexAbstraction.getWriteIndex(); @@ -467,7 +476,7 @@ Index[] concreteIndices(Context context, String... indexExpressions) { throw new IllegalArgumentException( indexAbstraction.getType().getDisplayName() + " [" - + expression + + expression.resource() + "] has more than one index associated with it " + Arrays.toString(indexNames) + ", can't execute a single index op" @@ -682,7 +691,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { * Utility method that allows to resolve an index expression to its corresponding single write index. * * @param state the cluster state containing all the data to resolve the expression to a concrete index - * @param request The request that defines how the an alias or an index need to be resolved to a concrete index + * @param request The request that defines how an alias or an index needs to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. * @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index * @return the write index obtained as a result of the index resolution @@ -759,7 +768,7 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ - public Set<String> resolveExpressions(ClusterState state, String... expressions) { + public Set<ResolvedExpression> resolveExpressions(ClusterState state, String... 
expressions) * If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. */ - public Set resolveExpressions( + public Set resolveExpressions( ClusterState state, IndicesOptions indicesOptions, boolean preserveDataStreams, @@ -786,10 +795,10 @@ public Set resolveExpressions( getNetNewSystemIndexPredicate() ); // unmodifiable without creating a new collection as it might contain many items - Collection resolved = resolveExpressionsToResources(context, expressions); - if (resolved instanceof Set) { + Collection resolved = resolveExpressionsToResources(context, expressions); + if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items - return Collections.unmodifiableSet((Set) resolved); + return Collections.unmodifiableSet((Set) resolved); } else { return Set.copyOf(resolved); } @@ -802,7 +811,7 @@ public Set resolveExpressions( * the index itself - null is returned. Returns {@code null} if no filtering is required. * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}. */ - public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { + public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } @@ -829,26 +838,25 @@ public String[] indexAliases( Predicate requiredAlias, Predicate requiredDataStreamAlias, boolean skipIdentity, - Set resolvedExpressions + Set resolvedExpressions ) { - if (isAllIndices(resolvedExpressions)) { + if (isAllIndicesExpression(resolvedExpressions)) { return null; } - final IndexMetadata indexMetadata = state.metadata().getIndices().get(index); if (indexMetadata == null) { // Shouldn't happen throw new IndexNotFoundException(index); } - if (skipIdentity == false && resolvedExpressions.contains(index)) { + if (skipIdentity == false && resolvedExpressions.contains(new ResolvedExpression(index))) { return null; } IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) { + if (skipIdentity == false && resolvedExpressions.contains(new ResolvedExpression(dataStream.getName()))) { // skip the filters when the request targets the data stream name return null; } @@ -857,11 +865,12 @@ public String[] indexAliases( if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) { aliasesForDataStream = dataStreamAliases.values() .stream() - .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName())) + .filter(dataStreamAlias -> resolvedExpressions.contains(new ResolvedExpression(dataStreamAlias.getName()))) .filter(dataStreamAlias -> dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); } else { aliasesForDataStream = resolvedExpressions.stream() + .map(ResolvedExpression::resource) .map(dataStreamAliases::get) .filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); @@ -890,11 +899,12 @@ public String[] indexAliases( // faster to iterate indexAliases aliasCandidates = indexAliases.values() .stream() 
- .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias())) + .filter(aliasMetadata -> resolvedExpressions.contains(new ResolvedExpression(aliasMetadata.alias()))) .toArray(AliasMetadata[]::new); } else { // faster to iterate resolvedExpressions aliasCandidates = resolvedExpressions.stream() + .map(ResolvedExpression::resource) .map(indexAliases::get) .filter(Objects::nonNull) .toArray(AliasMetadata[]::new); @@ -937,12 +947,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); - - // TODO: it appears that this can never be true? - if (isAllIndices(resolvedExpressions)) { - return resolveSearchRoutingAllIndices(state.metadata(), routing); - } + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); Map> routings = null; Set paramRouting = null; @@ -952,8 +957,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); } - for (String expression : resolvedExpressions) { - IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { Index index = indexAbstraction.getIndices().get(i); @@ -993,7 +998,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } else { // Index - routings = collectRoutings(routings, paramRouting, norouting, expression); + routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource()); } } @@ -1039,6 +1044,30 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m return null; } + /** + * Identifies whether the array containing index names given as argument refers to all indices + * The empty or null array identifies all indices + * + * @param aliasesOrIndices the array containing index names + * @return true if the provided array maps to all indices, false otherwise + */ + public static boolean isAllIndicesExpression(Collection aliasesOrIndices) { + return aliasesOrIndices == null || aliasesOrIndices.isEmpty() || isExplicitAllPatternExpression(aliasesOrIndices); + } + + /** + * Identifies whether the array containing index names given as argument explicitly refers to all indices + * The empty or null array doesn't explicitly map to all indices + * + * @param aliasesOrIndices the array containing index names + * @return true if the provided array explicitly maps to all indices, false otherwise + */ + static boolean isExplicitAllPatternExpression(Collection aliasesOrIndices) { + return aliasesOrIndices != null + && aliasesOrIndices.size() == 1 + && Metadata.ALL.equals(aliasesOrIndices.iterator().next().resource()); + } + /** * Identifies whether the array containing index names given as argument refers to all indices * The empty or null array identifies all indices @@ -1334,14 +1363,14 @@ private WildcardExpressionResolver() { * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the data streams themselves or their backing indices. 
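* For illustration (names are hypothetical): with a data stream {@code logs} backed by {@code .ds-logs-2024.01.01-000001},
* the result contains {@code ResolvedExpression("logs")} when data streams are preserved, and the backing index name otherwise.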
      */
-    public static Collection<String> resolveAll(Context context) {
-        List<String> concreteIndices = resolveEmptyOrTrivialWildcard(context);
+    public static Collection<ResolvedExpression> resolveAll(Context context) {
+        List<ResolvedExpression> concreteIndices = resolveEmptyOrTrivialWildcard(context);

         if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) {
             return concreteIndices;
         }

-        Set<String> resolved = new HashSet<>(concreteIndices.size());
+        Set<ResolvedExpression> resolved = new HashSet<>(concreteIndices.size());
         context.getState()
             .metadata()
             .getIndicesLookup()
@@ -1386,10 +1415,10 @@ private static IndexMetadata.State excludeState(IndicesOptions options) {
      * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions
      * on whether to consider alias, data stream, system, and hidden resources.
      */
-    static Set<String> matchWildcardToResources(Context context, String wildcardExpression) {
+    static Set<ResolvedExpression> matchWildcardToResources(Context context, String wildcardExpression) {
         assert isWildcard(wildcardExpression);
         final SortedMap<String, IndexAbstraction> indicesLookup = context.getState().getMetadata().getIndicesLookup();
-        Set<String> matchedResources = new HashSet<>();
+        Set<ResolvedExpression> matchedResources = new HashSet<>();
         // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*"
         if (Regex.isSuffixMatchPattern(wildcardExpression)) {
             for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) {
@@ -1416,7 +1445,7 @@ private static void maybeAddToResult(
         Context context,
         String wildcardExpression,
         IndexAbstraction indexAbstraction,
-        Set<String> matchedResources
+        Set<ResolvedExpression> matchedResources
     ) {
         if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) {
             matchedResources.addAll(expandToOpenClosed(context, indexAbstraction));
@@ -1475,20 +1504,20 @@ private static Map<String, IndexAbstraction> filterIndicesLookupForSuffixWildcar
      * Data streams and aliases are interpreted to refer to multiple indices,
      * then all index resources are filtered by their open/closed status.
      */
-    private static Set<String> expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) {
+    private static Set<ResolvedExpression> expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) {
         final IndexMetadata.State excludeState = excludeState(context.getOptions());
-        Set<String> resources = new HashSet<>();
+        Set<ResolvedExpression> resources = new HashSet<>();
         if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) {
-            resources.add(indexAbstraction.getName());
+            resources.add(new ResolvedExpression(indexAbstraction.getName()));
         } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) {
-            resources.add(indexAbstraction.getName());
+            resources.add(new ResolvedExpression(indexAbstraction.getName()));
         } else {
             if (shouldIncludeRegularIndices(context.getOptions())) {
                 for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) {
                     Index index = indexAbstraction.getIndices().get(i);
                     IndexMetadata indexMetadata = context.state.metadata().index(index);
                     if (indexMetadata.getState() != excludeState) {
-                        resources.add(index.getName());
+                        resources.add(new ResolvedExpression(index.getName()));
                     }
                 }
             }
@@ -1498,7 +1527,7 @@ private static Set<String> expandToOpenClosed(Context context, IndexAbstraction
                     Index index = dataStream.getFailureIndices().getIndices().get(i);
                     IndexMetadata indexMetadata = context.state.metadata().index(index);
                     if (indexMetadata.getState() != excludeState) {
-                        resources.add(index.getName());
+                        resources.add(new ResolvedExpression(index.getName()));
                     }
                 }
             }
@@ -1506,20 +1535,27 @@ private static Set<String> expandToOpenClosed(Context context, IndexAbstraction
         return resources;
     }

-    private static List<String> resolveEmptyOrTrivialWildcard(Context context) {
+    private static List<ResolvedExpression> resolveEmptyOrTrivialWildcard(Context context) {
         final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getState().metadata());
         if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) {
-            return List.of(allIndices);
+            List<ResolvedExpression> result = new ArrayList<>(allIndices.length);
+            for (int i = 0; i < allIndices.length; i++) {
+                result.add(new ResolvedExpression(allIndices[i]));
+            }
+            return result;
         } else {
             return resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices);
         }
     }

-    private static List<String> resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) {
-        List<String> filteredIndices = new ArrayList<>(allIndices.length);
+    private static List<ResolvedExpression> resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(
+        Context context,
+        String[] allIndices
+    ) {
+        List<ResolvedExpression> filteredIndices = new ArrayList<>(allIndices.length);
         for (int i = 0; i < allIndices.length; i++) {
             if (shouldIncludeIndexAbstraction(context, allIndices[i])) {
-                filteredIndices.add(allIndices[i]);
+                filteredIndices.add(new ResolvedExpression(allIndices[i]));
             }
         }
         return filteredIndices;
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java
index 5dbf4da6f376f..0de87c7226380 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java
@@ -261,11 +261,16 @@ static ClusterState createDataStream(
         // This is not a problem as both have different prefixes (`.ds-` vs `.fs-`) and both will be using the same `generation` field
         // when rolling over in the future.
         final long initialGeneration = 1;
+        ResettableValue<DataStreamOptions.Template> dataStreamOptionsTemplate = isSystem
+            ? MetadataIndexTemplateService.resolveDataStreamOptions(template, systemDataStreamDescriptor.getComponentTemplates())
+            : MetadataIndexTemplateService.resolveDataStreamOptions(template, metadata.componentTemplates());
+        final DataStreamOptions dataStreamOptions = dataStreamOptionsTemplate.mapAndGet(DataStreamOptions.Template::toDataStreamOptions);
+        var isFailureStoreEnabled = dataStreamOptions != null && dataStreamOptions.isFailureStoreEnabled();

         // If we need to create a failure store, do so first. Do not reroute during the creation since we will do
         // that as part of creating the backing index if required.
         IndexMetadata failureStoreIndex = null;
-        if (template.getDataStreamTemplate().hasFailureStore() && initializeFailureStore) {
+        if (isFailureStoreEnabled && initializeFailureStore) {
             if (isSystem) {
                 throw new IllegalArgumentException("Failure stores are not supported on system data streams");
             }
@@ -303,7 +308,7 @@ static ClusterState createDataStream(
         }
         assert writeIndex != null;
         assert writeIndex.mapping() != null : "no mapping found for backing index [" + writeIndex.getIndex().getName() + "]";
-        assert template.getDataStreamTemplate().hasFailureStore() == false || initializeFailureStore == false || failureStoreIndex != null
+        assert isFailureStoreEnabled == false || initializeFailureStore == false || failureStoreIndex != null
             : "failure store should have an initial index";
         assert failureStoreIndex == null || failureStoreIndex.mapping() != null
             : "no mapping found for failure store [" + failureStoreIndex.getIndex().getName() + "]";
@@ -329,7 +334,7 @@ static ClusterState createDataStream(
             template.getDataStreamTemplate().isAllowCustomRouting(),
             indexMode,
             lifecycle == null && isDslOnlyMode ? DataStreamLifecycle.DEFAULT : lifecycle,
-            template.getDataStreamTemplate().hasFailureStore() ? DataStreamOptions.FAILURE_STORE_ENABLED : DataStreamOptions.EMPTY,
+            dataStreamOptions,
             new DataStream.DataStreamIndices(DataStream.BACKING_INDEX_PREFIX, dsBackingIndices, false, null),
             // If the failure store shouldn't be initialized on data stream creation, we're marking it for "lazy rollover", which will
             // initialize the failure store on first write.
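Editorial aside: the IndexNameExpressionResolver changes above thread a small wrapper type through the resolver instead of raw strings. The standalone sketch below (not part of the diff) illustrates the idea; this `ResolvedExpression` record is a simplified stand-in for the real class, which is nested in `IndexNameExpressionResolver` and may carry more state than a single resource name.

import java.util.List;
import java.util.Set;

// Minimal, illustrative sketch only; the record here is hypothetical.
public class ResolvedExpressionSketch {

    record ResolvedExpression(String resource) {}

    public static void main(String[] args) {
        // Membership checks wrap the name instead of comparing raw strings,
        // as in the updated indexAliases() above. Records get value-based
        // equals/hashCode, so set membership works as expected.
        Set<ResolvedExpression> resolved = Set.of(new ResolvedExpression("logs-app"));
        System.out.println(resolved.contains(new ResolvedExpression("logs-app"))); // true

        // Unwrapping is explicit wherever a plain index name is still needed,
        // mirroring the new .map(ResolvedExpression::resource) stream steps in the diff.
        List<String> names = resolved.stream().map(ResolvedExpression::resource).toList();
        System.out.println(names); // [logs-app]
    }
}

The benefit is that call sites can no longer silently mix resolved and unresolved names; converting back to a plain string is a visible, deliberate step.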
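Similarly, the failure-store wiring above resolves data stream options through the three-state `ResettableValue` type introduced later in this diff. The sketch below is a hypothetical, simplified model of the composition semantics under the assumption that later values override earlier ones; the real class is generic, supports merge functions, and handles wire and XContent serialization.

import java.util.List;

// Toy model of the undefined / reset / value states, for illustration only.
public class ResettableValueSketch {

    sealed interface State permits Undefined, Reset, Value {}

    record Undefined() implements State {}  // the value was never specified

    record Reset() implements State {}      // the user explicitly asked to reset

    record Value(boolean failureStoreEnabled) implements State {} // a concrete value

    // Compose values in list order; the index template's own value is expected
    // last, so it has the highest precedence, loosely mirroring
    // resolveDataStreamOptions/composeDataStreamOptions in this diff.
    static State compose(List<State> states) {
        State result = new Undefined();
        for (State state : states) {
            if (state instanceof Undefined) {
                continue;                                   // undefined keeps the previous result
            }
            result = (state instanceof Reset) ? new Undefined() : state; // reset clears, a value overrides
        }
        return result;
    }

    public static void main(String[] args) {
        // A component template enables the failure store, the index template resets it:
        System.out.println(compose(List.of(new Value(true), new Reset())));     // Undefined[]
        // A component template enables it and the index template stays silent:
        System.out.println(compose(List.of(new Value(true), new Undefined()))); // Value[failureStoreEnabled=true]
    }
}

The distinction between "undefined" and "reset" is the whole point: an absent option inherits whatever the component templates composed to, while an explicit reset discards it.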
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
index 3878a3329b634..7f8b87d2d3f48 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
@@ -312,12 +312,7 @@ public ClusterState addComponentTemplate(
             }
         }

-        final Template finalTemplate = new Template(
-            finalSettings,
-            wrappedMappings,
-            template.template().aliases(),
-            template.template().lifecycle()
-        );
+        final Template finalTemplate = Template.builder(template.template()).settings(finalSettings).mappings(wrappedMappings).build();
         final ComponentTemplate finalComponentTemplate = new ComponentTemplate(
             finalTemplate,
             template.version(),
@@ -348,6 +343,7 @@ public ClusterState addComponentTemplate(
                     composableTemplate,
                     globalRetentionSettings.get()
                 );
+                validateDataStreamOptions(tempStateWithComponentTemplateAdded.metadata(), composableTemplateName, composableTemplate);
                 validateIndexTemplateV2(composableTemplateName, composableTemplate, tempStateWithComponentTemplateAdded);
             } catch (Exception e) {
                 if (validationFailure == null) {
@@ -629,7 +625,7 @@ public ClusterState addIndexTemplateV2(
             // adjusted (to add _doc) and it should be validated
             CompressedXContent mappings = innerTemplate.mappings();
             CompressedXContent wrappedMappings = wrapMappingsIfNecessary(mappings, xContentRegistry);
-            final Template finalTemplate = new Template(finalSettings, wrappedMappings, innerTemplate.aliases(), innerTemplate.lifecycle());
+            final Template finalTemplate = Template.builder(innerTemplate).settings(finalSettings).mappings(wrappedMappings).build();
             finalIndexTemplate = template.toBuilder().template(finalTemplate).build();
         }

@@ -690,7 +686,8 @@ public static Map<String, List<String>> v2TemplateOverlaps(
         return overlaps;
     }

-    private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexTemplate, ClusterState currentState) {
+    // Visible for testing
+    void validateIndexTemplateV2(String name, ComposableIndexTemplate indexTemplate, ClusterState currentState) {
         // Workaround for the fact that start_time and end_time are injected by the MetadataCreateDataStreamService upon creation,
         // but when validating templates that create data streams the MetadataCreateDataStreamService isn't used.
         var finalTemplate = indexTemplate.template();
@@ -726,6 +723,7 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT
         validate(name, templateToValidate);
         validateDataStreamsStillReferenced(currentState, name, templateToValidate);
         validateLifecycle(currentState.metadata(), name, templateToValidate, globalRetentionSettings.get());
+        validateDataStreamOptions(currentState.metadata(), name, templateToValidate);

         if (templateToValidate.isDeprecated() == false) {
             validateUseOfDeprecatedComponentTemplates(name, templateToValidate, currentState.metadata().componentTemplates());
@@ -819,6 +817,20 @@ static void validateLifecycle(
         }
     }

+    // Visible for testing
+    static void validateDataStreamOptions(Metadata metadata, String indexTemplateName, ComposableIndexTemplate template) {
+        ResettableValue<DataStreamOptions.Template> dataStreamOptions = resolveDataStreamOptions(template, metadata.componentTemplates());
+        if (dataStreamOptions.get() != null) {
+            if (template.getDataStreamTemplate() == null) {
+                throw new IllegalArgumentException(
+                    "index template ["
+                        + indexTemplateName
+                        + "] specifies data stream options that can only be used in combination with a data stream"
+                );
+            }
+        }
+    }
+
     /**
      * Validate that by changing or adding {@code newTemplate}, there are
      * no unreferenced data streams. Note that this scenario is still possible
@@ -1561,7 +1573,7 @@ static List<Map<String, AliasMetadata>> resolveAliases(
     public static DataStreamLifecycle resolveLifecycle(final Metadata metadata, final String templateName) {
         final ComposableIndexTemplate template = metadata.templatesV2().get(templateName);
         assert template != null
-            : "attempted to resolve settings for a template [" + templateName + "] that did not exist in the cluster state";
+            : "attempted to resolve lifecycle for a template [" + templateName + "] that did not exist in the cluster state";
         if (template == null) {
             return null;
         }
@@ -1653,6 +1665,81 @@ public static DataStreamLifecycle composeDataLifecycles(List<DataStreamLifecycle> lifecycles) {
     }

+    /**
+     * Resolve the given v2 template into a {@link ResettableValue<DataStreamOptions.Template>} object
+     */
+    public static ResettableValue<DataStreamOptions.Template> resolveDataStreamOptions(final Metadata metadata, final String templateName) {
+        final ComposableIndexTemplate template = metadata.templatesV2().get(templateName);
+        assert template != null
+            : "attempted to resolve data stream options for a template [" + templateName + "] that did not exist in the cluster state";
+        if (template == null) {
+            return ResettableValue.undefined();
+        }
+        return resolveDataStreamOptions(template, metadata.componentTemplates());
+    }
+
+    /**
+     * Resolve the provided v2 template and component templates into a {@link ResettableValue<DataStreamOptions.Template>} object
+     */
+    public static ResettableValue<DataStreamOptions.Template> resolveDataStreamOptions(
+        ComposableIndexTemplate template,
+        Map<String, ComponentTemplate> componentTemplates
+    ) {
+        Objects.requireNonNull(template, "attempted to resolve data stream options for a null template");
+        Objects.requireNonNull(componentTemplates, "attempted to resolve data stream options with null component templates");
+
+        List<ResettableValue<DataStreamOptions.Template>> dataStreamOptionsList = new ArrayList<>();
+        for (String componentTemplateName : template.composedOf()) {
+            if (componentTemplates.containsKey(componentTemplateName) == false) {
+                continue;
+            }
+            ResettableValue<DataStreamOptions.Template> dataStreamOptions = componentTemplates.get(componentTemplateName)
+                .template()
+                .resettableDataStreamOptions();
+            if (dataStreamOptions.isDefined()) {
+                dataStreamOptionsList.add(dataStreamOptions);
+            }
+        }
+        // The actual index template's data stream options have the highest precedence.
+        if (template.template() != null && template.template().resettableDataStreamOptions().isDefined()) {
+            dataStreamOptionsList.add(template.template().resettableDataStreamOptions());
+        }
+        return composeDataStreamOptions(dataStreamOptionsList);
+    }
+
+    /**
+     * This method composes a series of data stream options into a final one. Since the data stream options currently
+     * contain only the failure store configuration, which itself holds a single field, the composition is fairly trivial.
+     * But we introduce the mechanics that will help extend it really easily.
+     * @param dataStreamOptionsList a sorted list of data stream options in the order in which they will be composed
+     * @return the final data stream options configuration
+     */
+    public static ResettableValue<DataStreamOptions.Template> composeDataStreamOptions(
+        List<ResettableValue<DataStreamOptions.Template>> dataStreamOptionsList
+    ) {
+        if (dataStreamOptionsList.isEmpty()) {
+            return ResettableValue.undefined();
+        }
+        DataStreamOptions.Template.Builder builder = null;
+        for (ResettableValue<DataStreamOptions.Template> current : dataStreamOptionsList) {
+            if (current.isDefined() == false) {
+                continue;
+            }
+            if (current.shouldReset()) {
+                builder = null;
+            } else {
+                DataStreamOptions.Template currentTemplate = current.get();
+                if (builder == null) {
+                    builder = DataStreamOptions.Template.builder(currentTemplate);
+                } else {
+                    // Currently the failure store has only one field that needs to be defined, so composing it is trivial
+                    builder.updateFailureStore(currentTemplate.failureStore());
+                }
+            }
+        }
+        return builder == null ? ResettableValue.undefined() : ResettableValue.create(builder.build());
+    }
+
     /**
      * Given a state and a composable template, validate that the final composite template
      * generated by the composable template and all of its component templates contains valid
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java
new file mode 100644
index 0000000000000..4f38d2b8386a6
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.cluster.metadata;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+/**
+ * This class holds a value of type {@code T} that can be in one of three states:
+ * - It has a concrete value, or
+ * - It is missing, or
+ * - It is meant to reset any other value it is composed with.
+ * It is mainly used in template composition to capture the case when the user wishes to reset any previous values.
+ * @param <T> the type of the value
+ */
+public class ResettableValue<T> {
+    private static final ResettableValue<?> RESET = new ResettableValue<>(true, null);
+    private static final ResettableValue<?> UNDEFINED = new ResettableValue<>(false, null);
+    private static final String DISPLAY_RESET_VALUES = "display_reset";
+    private static final Map<String, String> HIDE_RESET_VALUES_PARAMS = Map.of(DISPLAY_RESET_VALUES, "false");
+
+    private final T value;
+    private final boolean isDefined;
+
+    /**
+     * @return the reset state, meaning that this value is explicitly requested to be reset
+     */
+    public static <T> ResettableValue<T> reset() {
+        @SuppressWarnings("unchecked")
+        ResettableValue<T> t = (ResettableValue<T>) RESET;
+        return t;
+    }
+
+    /**
+     * @return the undefined state, meaning that this value has not been specified
+     */
+    public static <T> ResettableValue<T> undefined() {
+        @SuppressWarnings("unchecked")
+        ResettableValue<T> t = (ResettableValue<T>) UNDEFINED;
+        return t;
+    }
+
+    /**
+     * Wraps a value; if the value is null, it returns {@link #undefined()}
+     */
+    public static <T> ResettableValue<T> create(T value) {
+        if (value == null) {
+            return undefined();
+        }
+        return new ResettableValue<>(true, value);
+    }
+
+    private ResettableValue(boolean isDefined, T value) {
+        this.isDefined = isDefined;
+        this.value = value;
+    }
+
+    /**
+     * @return true if the state of this value is reset
+     */
+    public boolean shouldReset() {
+        return isDefined && value == null;
+    }
+
+    /**
+     * @return true when the value is defined, either with a concrete value or as reset
+     */
+    public boolean isDefined() {
+        return isDefined;
+    }
+
+    /**
+     * @return the concrete value, or null if it is in the undefined or reset state
+     */
+    @Nullable
+    public T get() {
+        return value;
+    }
+
+    /**
+     * Writes a single optional and explicitly nullable value. This method is in direct relation with
+     * {@link #read(StreamInput, Writeable.Reader)}, which reads the respective value. It is the
+     * responsibility of the caller to preserve the order of the fields and their backwards compatibility.
+     *
+     * @throws IOException
+     */
+    static <T> void write(StreamOutput out, ResettableValue<T> value, Writeable.Writer<T> writer) throws IOException {
+        out.writeBoolean(value.isDefined);
+        if (value.isDefined) {
+            out.writeBoolean(value.shouldReset());
+            if (value.shouldReset() == false) {
+                writer.write(out, value.get());
+            }
+        }
+    }
+
+    /**
+     * Reads a single optional and explicitly nullable value. This method is in direct relation with
+     * {@link #write(StreamOutput, ResettableValue, Writeable.Writer)}, which writes the respective value. It is the
+     * responsibility of the caller to preserve the order of the fields and their backwards compatibility.
+     *
+     * @throws IOException
+     */
+    @Nullable
+    static <T> ResettableValue<T> read(StreamInput in, Writeable.Reader<T> reader) throws IOException {
+        boolean isDefined = in.readBoolean();
+        if (isDefined == false) {
+            return ResettableValue.undefined();
+        }
+        boolean shouldReset = in.readBoolean();
+        if (shouldReset) {
+            return ResettableValue.reset();
+        }
+        T value = reader.read(in);
+        return ResettableValue.create(value);
+    }
+
+    /**
+     * Gets the value and applies the function {@code f} when the value is not null. Slightly more efficient than
+     * {@code this.map(f).get()}.
+     */
+    public <U> U mapAndGet(Function<T, U> f) {
+        if (isDefined() == false || shouldReset()) {
+            return null;
+        } else {
+            return f.apply(value);
+        }
+    }
+
+    public <U> ResettableValue<U> map(Function<T, U> mapper) {
+        Objects.requireNonNull(mapper);
+        if (isDefined == false) {
+            return ResettableValue.undefined();
+        }
+        if (shouldReset()) {
+            return reset();
+        }
+        return ResettableValue.create(mapper.apply(value));
+    }
+
+    /**
+     * Merges the values of two ResettableValues when they are defined, using the provided mergeFunction.
+     */
+    public static <T> ResettableValue<T> merge(ResettableValue<T> initial, ResettableValue<T> update, BiFunction<T, T, T> mergeFunction) {
+        if (update.shouldReset()) {
+            return undefined();
+        }
+        if (update.isDefined() == false) {
+            return initial;
+        }
+        if (initial.isDefined() == false || initial.shouldReset()) {
+            return update;
+        }
+        // Because we checked that both are defined and not in the reset state, we can directly apply the merge function.
+        return ResettableValue.create(mergeFunction.apply(initial.value, update.value));
+    }
+
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params, String field) throws IOException {
+        return toXContent(builder, params, field, Function.identity());
+    }
+
+    public <U> XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params, String field, Function<T, U> transformValue)
+        throws IOException {
+        if (isDefined) {
+            if (value != null) {
+                builder.field(field, transformValue.apply(value));
+            } else if (ResettableValue.shouldDisplayResetValue(params)) {
+                builder.nullField(field);
+            }
+        }
+        return builder;
+    }
+
+    public static boolean shouldDisplayResetValue(ToXContent.Params params) {
+        return params.paramAsBoolean(DISPLAY_RESET_VALUES, true);
+    }
+
+    public static ToXContent.Params hideResetValues(ToXContent.Params params) {
+        return new ToXContent.DelegatingMapParams(HIDE_RESET_VALUES_PARAMS, params);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        ResettableValue<?> that = (ResettableValue<?>) o;
+        return isDefined == that.isDefined && Objects.equals(value, that.value);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(value, isDefined);
+    }
+
+    @Override
+    public String toString() {
+        return "ResettableValue{" + "value=" + value + ", isDefined=" + isDefined + '}';
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
index 0a9e79284ced6..7d354768ca987 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java
@@ -47,12 +47,19 @@ public class Template implements SimpleDiffable<Template>