diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index dfedfac9d5b04..081d059460653 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -40,8 +40,6 @@ steps: matrix: setup: image: - - windows-2016 - - windows-2019 - windows-2022 agents: provider: gcp diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 8819a5f7f493f..788960c76e150 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -345,8 +345,6 @@ steps: matrix: setup: image: - - windows-2016 - - windows-2019 - windows-2022 agents: provider: gcp diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 86e0623ba5b87..f9f75488f0917 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -38,8 +38,6 @@ steps: matrix: setup: image: - - windows-2016 - - windows-2019 - windows-2022 GRADLE_TASK: - checkPart1 diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml index bcf38f51f4a66..6e8fec65f75a5 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml @@ -12,7 +12,7 @@ steps: matrix: setup: image: - - windows-2019 + - windows-2022 PACKAGING_TASK: - default-windows-archive agents: diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml index 651a82982460f..df5505bea9a62 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-windows.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml @@ -10,8 +10,6 @@ steps: matrix: setup: image: 
- - windows-2016 - - windows-2019 - windows-2022 PACKAGING_TASK: - default-windows-archive diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 540da14402192..6a39b58b0957e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -65,6 +65,30 @@ server/src/main/java/org/elasticsearch/bootstrap @elastic/es-core-infra server/src/main/java/org/elasticsearch/node @elastic/es-core-infra server/src/main/java/org/elasticsearch/plugins @elastic/es-core-infra server/src/main/java/org/elasticsearch/threadpool @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/breaker @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/bytes @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/cli @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/collect @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/component @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/compress @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/document @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/file @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/hash @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/io @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/logging @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/metrics @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/network @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/path @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/recycler @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/regex @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/scheduler @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/settings @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/text 
@elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/time @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/transport @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/unit @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/util @elastic/es-core-infra +server/src/main/java/org/elasticsearch/common/xcontent @elastic/es-core-infra # Security x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege @elastic/es-security diff --git a/README.asciidoc b/README.asciidoc index bac6d0ed71752..df6208a8f4229 100644 --- a/README.asciidoc +++ b/README.asciidoc @@ -4,7 +4,7 @@ Elasticsearch is a distributed search and analytics engine, scalable data store Use cases enabled by Elasticsearch include: -* https://www.elastic.co/search-labs/blog/articles/retrieval-augmented-generation-rag[Retrieval Augmented Generation (RAG)] +* https://www.elastic.co/search-labs/blog/articles/retrieval-augmented-generation-rag[Retrieval Augmented Generation (RAG)] * https://www.elastic.co/search-labs/blog/categories/vector-search[Vector search] * Full-text search * Logs @@ -17,7 +17,7 @@ Use cases enabled by Elasticsearch include: To learn more about Elasticsearch's features and capabilities, see our https://www.elastic.co/products/elasticsearch[product page]. -To access information on https://www.elastic.co/search-labs/blog/categories/ml-research[machine learning innovations] and the latest https://www.elastic.co/search-labs/blog/categories/lucene[Lucene contributions from Elastic], more information can be found in https://www.elastic.co/search-labs[Search Labs]. +To access information on https://www.elastic.co/search-labs/blog/categories/ml-research[machine learning innovations] and the latest https://www.elastic.co/search-labs/blog/categories/lucene[Lucene contributions from Elastic], more information can be found in https://www.elastic.co/search-labs[Search Labs]. 
[[get-started]] == Get started @@ -27,20 +27,20 @@ https://www.elastic.co/cloud/as-a-service[Elasticsearch Service on Elastic Cloud]. If you prefer to install and manage Elasticsearch yourself, you can download -the latest version from +the latest version from https://www.elastic.co/downloads/elasticsearch[elastic.co/downloads/elasticsearch]. === Run Elasticsearch locally -//// +//// IMPORTANT: This content is replicated in the Elasticsearch repo. See `run-elasticsearch-locally.asciidoc`. Ensure both files are in sync. https://github.com/elastic/start-local is the source of truth. -//// +//// [WARNING] -==== +==== DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS. This setup is intended for local development and testing only. @@ -93,7 +93,7 @@ Use this key to connect to Elasticsearch with a https://www.elastic.co/guide/en/ From the `elastic-start-local` folder, check the connection to Elasticsearch using `curl`: [source,sh] ----- +---- source .env curl $ES_LOCAL_URL -H "Authorization: ApiKey ${ES_LOCAL_API_KEY}" ---- @@ -101,12 +101,12 @@ curl $ES_LOCAL_URL -H "Authorization: ApiKey ${ES_LOCAL_API_KEY}" === Send requests to Elasticsearch -You send data and other requests to Elasticsearch through REST APIs. -You can interact with Elasticsearch using any client that sends HTTP requests, +You send data and other requests to Elasticsearch through REST APIs. +You can interact with Elasticsearch using any client that sends HTTP requests, such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch -language clients] and https://curl.se[curl]. +language clients] and https://curl.se[curl]. -==== Using curl +==== Using curl Here's an example curl command to create a new Elasticsearch index, using basic auth: @@ -149,19 +149,19 @@ print(client.info()) ==== Using the Dev Tools Console -Kibana's developer console provides an easy way to experiment and test requests. 
+Kibana's developer console provides an easy way to experiment and test requests. To access the console, open Kibana, then go to **Management** > **Dev Tools**. **Add data** -You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs. -Whether you have structured or unstructured text, numerical data, or geospatial data, -Elasticsearch efficiently stores and indexes it in a way that supports fast searches. +You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs. +Whether you have structured or unstructured text, numerical data, or geospatial data, +Elasticsearch efficiently stores and indexes it in a way that supports fast searches. For timestamped data such as logs and metrics, you typically add documents to a data stream made up of multiple auto-generated backing indices. -To add a single document to an index, submit an HTTP post request that targets the index. +To add a single document to an index, submit an HTTP post request that targets the index. ---- POST /customer/_doc/1 @@ -171,11 +171,11 @@ POST /customer/_doc/1 } ---- -This request automatically creates the `customer` index if it doesn't exist, -adds a new document that has an ID of 1, and +This request automatically creates the `customer` index if it doesn't exist, +adds a new document that has an ID of 1, and stores and indexes the `firstname` and `lastname` fields. -The new document is available immediately from any node in the cluster. +The new document is available immediately from any node in the cluster. You can retrieve it with a GET request that specifies its document ID: ---- @@ -183,7 +183,7 @@ GET /customer/_doc/1 ---- To add multiple documents in one request, use the `_bulk` API. -Bulk data must be newline-delimited JSON (NDJSON). +Bulk data must be newline-delimited JSON (NDJSON). Each line must end in a newline character (`\n`), including the last line. 
---- @@ -200,15 +200,15 @@ PUT customer/_bulk **Search** -Indexed documents are available for search in near real-time. -The following search matches all customers with a first name of _Jennifer_ +Indexed documents are available for search in near real-time. +The following search matches all customers with a first name of _Jennifer_ in the `customer` index. ---- GET customer/_search { "query" : { - "match" : { "firstname": "Jennifer" } + "match" : { "firstname": "Jennifer" } } } ---- @@ -223,9 +223,9 @@ data streams, or index aliases. . Go to **Management > Stack Management > Kibana > Data Views**. . Select **Create data view**. -. Enter a name for the data view and a pattern that matches one or more indices, -such as _customer_. -. Select **Save data view to Kibana**. +. Enter a name for the data view and a pattern that matches one or more indices, +such as _customer_. +. Select **Save data view to Kibana**. To start exploring, go to **Analytics > Discover**. @@ -254,11 +254,6 @@ To build a distribution for another platform, run the related command: ./gradlew :distribution:archives:windows-zip:assemble ---- -To build distributions for all supported platforms, run: ----- -./gradlew assemble ----- - Distributions are output to `distribution/archives`. To run the test suite, see xref:TESTING.asciidoc[TESTING]. @@ -281,7 +276,7 @@ The https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs`] repo con [[contribute]] == Contribute -For contribution guidelines, see xref:CONTRIBUTING.md[CONTRIBUTING]. +For contribution guidelines, see xref:CONTRIBUTING.md[CONTRIBUTING]. [[questions]] == Questions? Problems? Suggestions? 
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/resolution/IndexNameExpressionResolverBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/resolution/IndexNameExpressionResolverBenchmark.java new file mode 100644 index 0000000000000..13a222b1ed35e --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/resolution/IndexNameExpressionResolverBenchmark.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.benchmark.indices.resolution; + +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.indices.SystemIndices; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; 
+import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +@State(Scope.Benchmark) +@Fork(3) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@SuppressWarnings("unused") // invoked by benchmarking framework +public class IndexNameExpressionResolverBenchmark { + + private static final String DATA_STREAM_PREFIX = "my-ds-"; + private static final String INDEX_PREFIX = "my-index-"; + + @Param( + { + // # data streams | # indices + " 1000| 100", + " 5000| 500", + " 10000| 1000" } + ) + public String resourceMix = "100|10"; + + @Setup + public void setUp() { + final String[] params = resourceMix.split("\\|"); + + int numDataStreams = toInt(params[0]); + int numIndices = toInt(params[1]); + + Metadata.Builder mb = Metadata.builder(); + String[] indices = new String[numIndices + numDataStreams * (numIndices + 1)]; + int position = 0; + for (int i = 1; i <= numIndices; i++) { + String indexName = INDEX_PREFIX + i; + createIndexMetadata(indexName, mb); + indices[position++] = indexName; + } + + for (int i = 1; i <= numDataStreams; i++) { + String dataStreamName = DATA_STREAM_PREFIX + i; + List backingIndices = new ArrayList<>(); + for (int j = 1; j <= numIndices; j++) { + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, j); + backingIndices.add(createIndexMetadata(backingIndexName, mb).getIndex()); + indices[position++] = backingIndexName; + } + indices[position++] = dataStreamName; + mb.put(DataStream.builder(dataStreamName, backingIndices).build()); + } + int mid = indices.length / 2; + clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(mb).build(); + resolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY), new SystemIndices(List.of())); + indexListRequest = new Request(IndicesOptions.lenientExpandOpenHidden(), indices); + starRequest = new 
Request(IndicesOptions.lenientExpandOpenHidden(), "*"); + String[] mixed = indices.clone(); + mixed[mid] = "my-*"; + mixedRequest = new Request(IndicesOptions.lenientExpandOpenHidden(), mixed); + } + + private IndexMetadata createIndexMetadata(String indexName, Metadata.Builder mb) { + IndexMetadata indexMetadata = IndexMetadata.builder(indexName) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + mb.put(indexMetadata, false); + return indexMetadata; + } + + private IndexNameExpressionResolver resolver; + private ClusterState clusterState; + private Request starRequest; + private Request indexListRequest; + private Request mixedRequest; + + @Benchmark + public String[] resolveResourcesListToConcreteIndices() { + return resolver.concreteIndexNames(clusterState, indexListRequest); + } + + @Benchmark + public String[] resolveAllStarToConcreteIndices() { + return resolver.concreteIndexNames(clusterState, starRequest); + } + + @Benchmark + public String[] resolveMixedConcreteIndices() { + return resolver.concreteIndexNames(clusterState, mixedRequest); + } + + private int toInt(String v) { + return Integer.parseInt(v.trim()); + } + + record Request(IndicesOptions indicesOptions, String... 
indices) implements IndicesRequest { + + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java index bbf411dbf04fa..92a8db6b5b913 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java @@ -48,10 +48,11 @@ public class InternalDistributionModuleCheckTaskProvider { /** ES jars in the lib directory that are not modularized. For now, es-log4j is the only one. */ private static final List ES_JAR_EXCLUDES = List.of("elasticsearch-log4j"); - /** List of the current Elasticsearch Java Modules, by name. */ + /** List of the current Elasticsearch Java Modules, alphabetically by name. */ private static final List EXPECTED_ES_SERVER_MODULES = List.of( "org.elasticsearch.base", "org.elasticsearch.cli", + "org.elasticsearch.entitlement", "org.elasticsearch.geo", "org.elasticsearch.grok", "org.elasticsearch.logging", diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 1e50da3895187..54bffd2a14b3d 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -42,6 +42,7 @@ public abstract class RunTask extends DefaultTestClustersTask { private Boolean debug = false; private Boolean cliDebug = false; + private Boolean entitlementsEnabled = false; private Boolean apmServerEnabled = false; private Boolean preserveData = false; @@ -69,6 +70,14 @@ public void setCliDebug(boolean enabled) { this.cliDebug = enabled; } + @Option( + option = "entitlements", + 
description = "Use the Entitlements agent system in place of SecurityManager to enforce sandbox policies." + ) + public void setEntitlementsEnabled(boolean enabled) { + this.entitlementsEnabled = enabled; + } + @Input public Boolean getDebug() { return debug; @@ -79,6 +88,11 @@ public Boolean getCliDebug() { return cliDebug; } + @Input + public Boolean getEntitlementsEnabled() { + return entitlementsEnabled; + } + @Input public Boolean getApmServerEnabled() { return apmServerEnabled; @@ -226,6 +240,9 @@ else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { if (cliDebug) { enableCliDebug(); } + if (entitlementsEnabled) { + enableEntitlements(); + } } @TaskAction diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java index 4fb89cc2e8674..f84aa2a0389c2 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -74,4 +74,12 @@ default void enableCliDebug() { } } } + + default void enableEntitlements() { + for (ElasticsearchCluster cluster : getClusters()) { + for (ElasticsearchNode node : cluster.getNodes()) { + node.cliJvmArgs("-Des.entitlements.enabled=true"); + } + } + } } diff --git a/build.gradle b/build.gradle index 2ef0511b2be88..a91347ca6e19b 100644 --- a/build.gradle +++ b/build.gradle @@ -13,14 +13,13 @@ import com.avast.gradle.dockercompose.tasks.ComposePull import com.fasterxml.jackson.databind.JsonNode import com.fasterxml.jackson.databind.ObjectMapper +import org.elasticsearch.gradle.DistributionDownloadPlugin import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BaseInternalPluginBuildPlugin import org.elasticsearch.gradle.internal.ResolveAllDependencies import org.elasticsearch.gradle.internal.info.BuildParams import 
org.elasticsearch.gradle.util.GradleUtils import org.gradle.plugins.ide.eclipse.model.AccessRule -import org.gradle.plugins.ide.eclipse.model.ProjectDependency -import org.elasticsearch.gradle.DistributionDownloadPlugin import java.nio.file.Files @@ -89,7 +88,7 @@ class ListExpansion { // Filters out intermediate patch releases to reduce the load of CI testing def filterIntermediatePatches = { List versions -> - versions.groupBy {"${it.major}.${it.minor}"}.values().collect {it.max()} + versions.groupBy { "${it.major}.${it.minor}" }.values().collect { it.max() } } tasks.register("updateCIBwcVersions") { @@ -101,7 +100,10 @@ tasks.register("updateCIBwcVersions") { } } - def writeBuildkitePipeline = { String outputFilePath, String pipelineTemplatePath, List listExpansions, List stepExpansions = [] -> + def writeBuildkitePipeline = { String outputFilePath, + String pipelineTemplatePath, + List listExpansions, + List stepExpansions = [] -> def outputFile = file(outputFilePath) def pipelineTemplate = file(pipelineTemplatePath) @@ -132,7 +134,12 @@ tasks.register("updateCIBwcVersions") { // Writes a Buildkite pipeline from a template, and replaces $BWC_STEPS with a list of steps, one for each version // Useful when you need to configure more versions than are allowed in a matrix configuration def expandBwcSteps = { String outputFilePath, String pipelineTemplatePath, String stepTemplatePath, List versions -> - writeBuildkitePipeline(outputFilePath, pipelineTemplatePath, [], [new StepExpansion(templatePath: stepTemplatePath, versions: versions, variable: "BWC_STEPS")]) + writeBuildkitePipeline( + outputFilePath, + pipelineTemplatePath, + [], + [new StepExpansion(templatePath: stepTemplatePath, versions: versions, variable: "BWC_STEPS")] + ) } doLast { @@ -150,7 +157,11 @@ tasks.register("updateCIBwcVersions") { new ListExpansion(versions: filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible), variable: "BWC_LIST"), ], [ - new 
StepExpansion(templatePath: ".buildkite/pipelines/periodic.bwc.template.yml", versions: filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible), variable: "BWC_STEPS"), + new StepExpansion( + templatePath: ".buildkite/pipelines/periodic.bwc.template.yml", + versions: filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible), + variable: "BWC_STEPS" + ), ] ) @@ -302,7 +313,7 @@ allprojects { if (project.path.startsWith(":x-pack:")) { if (project.path.contains("security") || project.path.contains(":ml")) { tasks.register('checkPart4') { dependsOn 'check' } - } else if (project.path == ":x-pack:plugin" || project.path.contains("ql") || project.path.contains("smoke-test")) { + } else if (project.path == ":x-pack:plugin" || project.path.contains("ql") || project.path.contains("smoke-test")) { tasks.register('checkPart3') { dependsOn 'check' } } else if (project.path.contains("multi-node")) { tasks.register('checkPart5') { dependsOn 'check' } diff --git a/distribution/build.gradle b/distribution/build.gradle index f7b6f7bc1c7d0..5b865b36f9e4d 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -262,7 +262,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { * Properties to expand when copying packaging files * *****************************************************************************/ configurations { - ['libs', 'libsVersionChecker', 'libsCliLauncher', 'libsServerCli', 'libsWindowsServiceCli', 'libsPluginCli', 'libsKeystoreCli', 'libsSecurityCli', 'libsGeoIpCli', 'libsAnsiConsole', 'libsNative'].each { + ['libs', 'libsVersionChecker', 'libsCliLauncher', 'libsServerCli', 'libsWindowsServiceCli', 'libsPluginCli', 'libsKeystoreCli', 'libsSecurityCli', 'libsGeoIpCli', 'libsAnsiConsole', 'libsNative', 'libsEntitlementAgent', 'libsEntitlementBridge'].each { create(it) { canBeConsumed = false canBeResolved = true @@ -292,6 +292,8 @@ configure(subprojects.findAll { ['archives', 
'packages'].contains(it.name) }) { libsSecurityCli project(':x-pack:plugin:security:cli') libsGeoIpCli project(':distribution:tools:geoip-cli') libsNative project(':libs:native:native-libraries') + libsEntitlementAgent project(':libs:entitlement:agent') + libsEntitlementBridge project(':libs:entitlement:bridge') } project.ext { @@ -336,6 +338,12 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { include (os + '-' + architecture + '/*') } } + into('entitlement-agent') { + from(configurations.libsEntitlementAgent) + } + into('entitlement-bridge') { + from(configurations.libsEntitlementBridge) + } } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 9832501073815..b17ad7c87e3ff 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -12,9 +12,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import java.util.stream.Stream; final class SystemJvmOptions { @@ -22,8 +24,8 @@ final class SystemJvmOptions { static List systemJvmOptions(Settings nodeSettings, final Map sysprops) { String distroType = sysprops.get("es.distribution.type"); boolean isHotspot = sysprops.getOrDefault("sun.management.compiler", "").contains("HotSpot"); - - return Stream.concat( + boolean useEntitlements = Boolean.parseBoolean(sysprops.getOrDefault("es.entitlements.enabled", "false")); + return Stream.of( Stream.of( /* * Cache ttl in seconds for positive DNS lookups noting that this overrides the JDK security property @@ 
-35,8 +37,6 @@ static List systemJvmOptions(Settings nodeSettings, final Map systemJvmOptions(Settings nodeSettings, final Map maybeSetReplayFile(String distroType, boolean isHotspot) { if (isHotspot == false) { // the replay file option is only guaranteed for hotspot vms - return ""; + return Stream.empty(); } String replayDir = "logs"; if ("rpm".equals(distroType) || "deb".equals(distroType)) { replayDir = "/var/log/elasticsearch"; } - return "-XX:ReplayDataFile=" + replayDir + "/replay_pid%p.log"; + return Stream.of("-XX:ReplayDataFile=" + replayDir + "/replay_pid%p.log"); } /* * node.processors determines thread pool sizes for Elasticsearch. When it * is set, we need to also tell the JVM to respect a different value */ - private static String maybeSetActiveProcessorCount(Settings nodeSettings) { + private static Stream maybeSetActiveProcessorCount(Settings nodeSettings) { if (EsExecutors.NODE_PROCESSORS_SETTING.exists(nodeSettings)) { int allocated = EsExecutors.allocatedProcessors(nodeSettings); - return "-XX:ActiveProcessorCount=" + allocated; + return Stream.of("-XX:ActiveProcessorCount=" + allocated); } - return ""; + return Stream.empty(); } - private static String maybeEnableNativeAccess() { + private static Stream maybeEnableNativeAccess() { if (Runtime.version().feature() >= 21) { - return "--enable-native-access=org.elasticsearch.nativeaccess,org.apache.lucene.core"; + return Stream.of("--enable-native-access=org.elasticsearch.nativeaccess,org.apache.lucene.core"); } - return ""; + return Stream.empty(); } /* @@ -134,4 +136,37 @@ private static Stream maybeWorkaroundG1Bug() { } return Stream.of(); } + + private static Stream maybeAllowSecurityManager() { + // Will become conditional on useEntitlements once entitlements can run without SM + return Stream.of("-Djava.security.manager=allow"); + } + + private static Stream maybeAttachEntitlementAgent(boolean useEntitlements) { + if (useEntitlements == false) { + return Stream.empty(); + } + + Path dir = 
Path.of("lib", "entitlement-bridge"); + if (Files.exists(dir) == false) { + throw new IllegalStateException("Directory for entitlement bridge jar does not exist: " + dir); + } + String bridgeJar; + try (var s = Files.list(dir)) { + var candidates = s.limit(2).toList(); + if (candidates.size() != 1) { + throw new IllegalStateException("Expected one jar in " + dir + "; found " + candidates.size()); + } + bridgeJar = candidates.get(0).toString(); + } catch (IOException e) { + throw new IllegalStateException("Failed to list entitlement jars in: " + dir, e); + } + return Stream.of( + "-Des.entitlements.enabled=true", + "-XX:+EnableDynamicAgentLoading", + "-Djdk.attach.allowAttachSelf=true", + "--patch-module=java.base=" + bridgeJar, + "--add-exports=java.base/org.elasticsearch.entitlement.bridge=org.elasticsearch.entitlement" + ); + } } diff --git a/docs/changelog/113713.yaml b/docs/changelog/113713.yaml new file mode 100644 index 0000000000000..c5478c95e464d --- /dev/null +++ b/docs/changelog/113713.yaml @@ -0,0 +1,5 @@ +pr: 113713 +summary: Adding inference endpoint validation for `AzureAiStudioService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/114207.yaml b/docs/changelog/114207.yaml new file mode 100644 index 0000000000000..2a0a8ae87452a --- /dev/null +++ b/docs/changelog/114207.yaml @@ -0,0 +1,15 @@ +pr: 114207 +summary: Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting +area: Allocation +type: breaking +issues: [] +breaking: + title: Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting + area: 'Cluster and node setting' + details: >- + Prior to 7.8, whenever a cluster had only a single data node, the watermarks would not be respected. + In order to change this in 7.8+ in a backwards compatible way, we introduced the + `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` + node setting. 
The setting was deprecated in 7.14 and was made to accept only true in 8.0 + impact: No known end user impact + notable: false diff --git a/docs/changelog/114227.yaml b/docs/changelog/114227.yaml new file mode 100644 index 0000000000000..9b508f07c9e5a --- /dev/null +++ b/docs/changelog/114227.yaml @@ -0,0 +1,6 @@ +pr: 114227 +summary: Ignore conflicting fields during dynamic mapping update +area: Mapping +type: bug +issues: + - 114228 diff --git a/docs/changelog/114484.yaml b/docs/changelog/114484.yaml new file mode 100644 index 0000000000000..48f54ad0218bb --- /dev/null +++ b/docs/changelog/114484.yaml @@ -0,0 +1,6 @@ +pr: 114484 +summary: Add `docvalue_fields` Support for `dense_vector` Fields +area: Search +type: enhancement +issues: + - 108470 diff --git a/docs/changelog/114879.yaml b/docs/changelog/114879.yaml new file mode 100644 index 0000000000000..b1546fd89d56c --- /dev/null +++ b/docs/changelog/114879.yaml @@ -0,0 +1,5 @@ +pr: 114879 +summary: Add refresh `.security` index call between security migrations +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/115655.yaml b/docs/changelog/115655.yaml new file mode 100644 index 0000000000000..7184405867657 --- /dev/null +++ b/docs/changelog/115655.yaml @@ -0,0 +1,5 @@ +pr: 115655 +summary: Better sizing `BytesRef` for Strings in Queries +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/115779.yaml b/docs/changelog/115779.yaml new file mode 100644 index 0000000000000..326751db7750b --- /dev/null +++ b/docs/changelog/115779.yaml @@ -0,0 +1,6 @@ +pr: 115779 +summary: Don't allow secure settings in YML config (109115) +area: Infra/Settings +type: bug +issues: + - 109115 diff --git a/docs/changelog/115792.yaml b/docs/changelog/115792.yaml new file mode 100644 index 0000000000000..2945a64e3043a --- /dev/null +++ b/docs/changelog/115792.yaml @@ -0,0 +1,5 @@ +pr: 115792 +summary: Add ES|QL `bit_length` function +area: ES|QL +type: enhancement +issues: [] diff --git 
a/docs/changelog/115876.yaml b/docs/changelog/115876.yaml new file mode 100644 index 0000000000000..29b34b8b250fb --- /dev/null +++ b/docs/changelog/115876.yaml @@ -0,0 +1,5 @@ +pr: 115876 +summary: Inference duration and error metrics +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116015.yaml b/docs/changelog/116015.yaml new file mode 100644 index 0000000000000..693fad639f2fa --- /dev/null +++ b/docs/changelog/116015.yaml @@ -0,0 +1,6 @@ +pr: 116015 +summary: Empty percentile results no longer throw no_such_element_exception in Anomaly Detection jobs +area: Machine Learning +type: bug +issues: + - 116013 diff --git a/docs/changelog/116021.yaml b/docs/changelog/116021.yaml new file mode 100644 index 0000000000000..58c84b26805b2 --- /dev/null +++ b/docs/changelog/116021.yaml @@ -0,0 +1,6 @@ +pr: 116021 +summary: Fields caps does not honour ignore_unavailable +area: Search +type: bug +issues: + - 107767 diff --git a/docs/changelog/116043.yaml b/docs/changelog/116043.yaml new file mode 100644 index 0000000000000..9f90257ecd7d8 --- /dev/null +++ b/docs/changelog/116043.yaml @@ -0,0 +1,6 @@ +pr: 116043 +summary: Support partial sort fields in TopN pushdown +area: ES|QL +type: enhancement +issues: + - 114515 diff --git a/docs/changelog/116082.yaml b/docs/changelog/116082.yaml new file mode 100644 index 0000000000000..35ca5fb1ea82e --- /dev/null +++ b/docs/changelog/116082.yaml @@ -0,0 +1,5 @@ +pr: 116082 +summary: Add support for bitwise inner-product in painless +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/changelog/116112.yaml b/docs/changelog/116112.yaml new file mode 100644 index 0000000000000..9e15d691a77d3 --- /dev/null +++ b/docs/changelog/116112.yaml @@ -0,0 +1,13 @@ +pr: 116112 +summary: Remove ignored fallback option on GeoIP processor +area: Ingest Node +type: breaking +issues: [] +breaking: + title: Remove ignored fallback option on GeoIP processor + area: Ingest + details: >- + The option 
fallback_to_default_databases on the geoip ingest processor has been removed. + (It was deprecated and ignored since 8.0.0.) + impact: Customers should remove the noop fallback_to_default_databases option on any geoip ingest processors. + notable: false diff --git a/docs/changelog/116128.yaml b/docs/changelog/116128.yaml new file mode 100644 index 0000000000000..7c38c0529c50d --- /dev/null +++ b/docs/changelog/116128.yaml @@ -0,0 +1,5 @@ +pr: 116128 +summary: Add num docs and size to logsdb telemetry +area: Logs +type: enhancement +issues: [] diff --git a/docs/changelog/116174.yaml b/docs/changelog/116174.yaml new file mode 100644 index 0000000000000..e3403f2c1c7fc --- /dev/null +++ b/docs/changelog/116174.yaml @@ -0,0 +1,7 @@ +pr: 116174 +summary: Handle with `illegalArgumentExceptions` negative values in HDR percentile + aggregations +area: Aggregations +type: bug +issues: + - 115777 diff --git a/docs/changelog/116211.yaml b/docs/changelog/116211.yaml new file mode 100644 index 0000000000000..6f55b1b2fef34 --- /dev/null +++ b/docs/changelog/116211.yaml @@ -0,0 +1,5 @@ +pr: 116211 +summary: Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` +area: Network +type: bug +issues: [] diff --git a/docs/changelog/116212.yaml b/docs/changelog/116212.yaml new file mode 100644 index 0000000000000..7c8756f4054cd --- /dev/null +++ b/docs/changelog/116212.yaml @@ -0,0 +1,6 @@ +pr: 116212 +summary: Handle status code 0 in S3 CMU response +area: Snapshot/Restore +type: bug +issues: + - 102294 diff --git a/docs/changelog/116219.yaml b/docs/changelog/116219.yaml new file mode 100644 index 0000000000000..aeeea68570e77 --- /dev/null +++ b/docs/changelog/116219.yaml @@ -0,0 +1,6 @@ +pr: 116219 +summary: "[apm-data] Apply lazy rollover on index template creation" +area: Data streams +type: bug +issues: + - 116230 diff --git a/docs/changelog/116259.yaml b/docs/changelog/116259.yaml new file mode 100644 index 0000000000000..72357842c4d13 --- /dev/null +++ 
b/docs/changelog/116259.yaml @@ -0,0 +1,14 @@ +pr: 116259 +summary: Fix `_type` deprecation on simulate pipeline API +area: Ingest Node +type: deprecation +issues: [] +deprecation: + title: Document `_type` deprecated on simulate pipeline API + area: REST API + details: >- + Passing a document with a `_type` property is deprecated in the `/_ingest/pipeline/{id}/_simulate` and + `/_ingest/pipeline/_simulate` APIs. + impact: >- + Users should already have stopped using mapping types, which were deprecated in {es} 7. This deprecation warning + will fire if they specify mapping types on documents passed to the simulate pipeline API. diff --git a/docs/changelog/116266.yaml b/docs/changelog/116266.yaml new file mode 100644 index 0000000000000..1fcc0c310962d --- /dev/null +++ b/docs/changelog/116266.yaml @@ -0,0 +1,5 @@ +pr: 116266 +summary: Align dot prefix validation with Serverless +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/116274.yaml b/docs/changelog/116274.yaml new file mode 100644 index 0000000000000..9d506c7725afd --- /dev/null +++ b/docs/changelog/116274.yaml @@ -0,0 +1,5 @@ +pr: 116274 +summary: "[ES|QL] Verify aggregation filter's type is boolean to avoid `class_cast_exception`" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/116292.yaml b/docs/changelog/116292.yaml new file mode 100644 index 0000000000000..f741c67bea155 --- /dev/null +++ b/docs/changelog/116292.yaml @@ -0,0 +1,5 @@ +pr: 116292 +summary: Add missing header in `put_data_lifecycle` rest-api-spec +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/116346.yaml b/docs/changelog/116346.yaml new file mode 100644 index 0000000000000..1dcace88a98c0 --- /dev/null +++ b/docs/changelog/116346.yaml @@ -0,0 +1,5 @@ +pr: 116346 +summary: "[ESQL] Fix Binary Comparisons on Date Nanos" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/116437.yaml b/docs/changelog/116437.yaml new file mode 100644 index 0000000000000..94c2464db9980 --- 
/dev/null +++ b/docs/changelog/116437.yaml @@ -0,0 +1,5 @@ +pr: 116437 +summary: Ensure class resource stream is closed in `ResourceUtils` +area: Indices APIs +type: enhancement +issues: [] diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index c4397748debfd..23a690b62372d 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -77,8 +77,8 @@ percentiles: `[ 1, 5, 25, 50, 75, 95, 99 ]`. The response will look like this: As you can see, the aggregation will return a calculated value for each percentile in the default range. If we assume response times are in milliseconds, it is -immediately obvious that the webpage normally loads in 10-725ms, but occasionally -spikes to 945-985ms. +immediately obvious that the webpage normally loads in 10-720ms, but occasionally +spikes to 940-980ms. Often, administrators are only interested in outliers -- the extreme percentiles. We can specify just the percents we are interested in (requested percentiles diff --git a/docs/reference/connector/docs/_connectors-create-client.asciidoc b/docs/reference/connector/docs/_connectors-create-client.asciidoc index 31e4468f7a6bc..917777a2ac786 100644 --- a/docs/reference/connector/docs/_connectors-create-client.asciidoc +++ b/docs/reference/connector/docs/_connectors-create-client.asciidoc @@ -3,7 +3,7 @@ To create a new {service-name} connector: -. Navigate to the *Search -> Connectors* page in the Kibana UI. +. In the Kibana UI, navigate to the *Search -> Content -> Connectors* page from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. . Follow the instructions to create a new *{service-name}* self-managed connector. 
[discrete#es-connectors-{service-name-stub}-client-create-use-the-api] diff --git a/docs/reference/connector/docs/_connectors-create-native.asciidoc b/docs/reference/connector/docs/_connectors-create-native.asciidoc index 1b7f5f22415fe..8023fbbe61136 100644 --- a/docs/reference/connector/docs/_connectors-create-native.asciidoc +++ b/docs/reference/connector/docs/_connectors-create-native.asciidoc @@ -3,7 +3,7 @@ To create a new {service-name} connector: -. Navigate to the *Search -> Connectors* page in the Kibana UI. +. In the Kibana UI, navigate to the *Search -> Content -> Connectors* page from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. . Follow the instructions to create a new native *{service-name}* connector. For additional operations, see <>. @@ -22,7 +22,7 @@ PUT _connector/my-{service-name-stub}-connector "index_name": "my-elasticsearch-index", "name": "Content synced from {service-name}", "service_type": "{service-name-stub}", - "is_native": "true" + "is_native": true } ---- // TEST[skip:can't test in isolation] diff --git a/docs/reference/connector/docs/connectors-hosted-tutorial-mongo.asciidoc b/docs/reference/connector/docs/connectors-hosted-tutorial-mongo.asciidoc index 12050ff17e279..6237a09129965 100644 --- a/docs/reference/connector/docs/connectors-hosted-tutorial-mongo.asciidoc +++ b/docs/reference/connector/docs/connectors-hosted-tutorial-mongo.asciidoc @@ -123,7 +123,7 @@ Once you're deployment is created, navigate to *Search*. The Elastic connector will sync your MongoDB data into a search-optimized Elasticsearch index. The first step is to create your index in the Kibana UI. -In the main menu navigate to *Search > Content > Indices*. +In the main menu, navigate to *Search > Content > Indices*, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. 
Follow these steps to create your index: @@ -178,7 +178,7 @@ If all the configuration details are correct, the sync will begin and documents As soon as your first documents are synced, you can view the documents and inspect the mapping for the index: -* In Kibana, navigate to *Search* > *Content* > *Indices*. +* In Kibana, navigate to *Search* > *Content* > *Indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. * Select your index, for example `search-mongo-sample`. * Choose the *Documents* tab to view the synced documents. Expand a document to view its fields. diff --git a/docs/reference/connector/docs/connectors-managed-service.asciidoc b/docs/reference/connector/docs/connectors-managed-service.asciidoc index df76a5ce9093f..98144ed74bcfa 100644 --- a/docs/reference/connector/docs/connectors-managed-service.asciidoc +++ b/docs/reference/connector/docs/connectors-managed-service.asciidoc @@ -80,7 +80,7 @@ Create a new index to be managed by the connector. Continue from above, or navigate to the following location within the {kib} UI: -*Search > Content > Elasticsearch indices* +*Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Choose the index to configure, and then choose the *Configuration* tab. diff --git a/docs/reference/connector/docs/connectors-ms-sql.asciidoc b/docs/reference/connector/docs/connectors-ms-sql.asciidoc index 5b6b74e86e621..47fb282b16877 100644 --- a/docs/reference/connector/docs/connectors-ms-sql.asciidoc +++ b/docs/reference/connector/docs/connectors-ms-sql.asciidoc @@ -45,7 +45,9 @@ include::_connectors-create-native.asciidoc[] To use this connector as a *managed connector*, use the *Connector* workflow. See <>. -Users require the `sysadmin` server role. +Users require the `sysadmin` SQL Server role. 
+Note that SQL Server Authentication is required. +Windows Authentication is not supported. For additional operations, see <>. @@ -75,10 +77,10 @@ Port:: The port where the Microsoft SQL Server is hosted. Default value is `1433`. Username:: -The username of the account for Microsoft SQL Server. +The username of the account for Microsoft SQL Server (SQL Server Authentication only). Password:: -The password of the account to be used for the Microsoft SQL Server. +The password of the account to be used for the Microsoft SQL Server (SQL Server Authentication only). Database:: Name of the Microsoft SQL Server database. @@ -310,6 +312,8 @@ include::_connectors-create-client.asciidoc[] ===== Usage Users require the `sysadmin` server role. +Note that SQL Server Authentication is required. +Windows Authentication is not supported. To use this connector as a *self-managed connector*, see <> For additional usage operations, see <>. @@ -350,10 +354,10 @@ Examples: The port where the Microsoft SQL Server is hosted. Default value is `9090`. `username`:: -The username of the account for Microsoft SQL Server. +The username of the account for Microsoft SQL Server. (SQL Server Authentication only) `password`:: -The password of the account to be used for the Microsoft SQL Server. +The password of the account to be used for the Microsoft SQL Server. (SQL Server Authentication only) `database`:: Name of the Microsoft SQL Server database. diff --git a/docs/reference/connector/docs/connectors-usage.asciidoc b/docs/reference/connector/docs/connectors-usage.asciidoc index 97fe7d92e945a..e48c503971e42 100644 --- a/docs/reference/connector/docs/connectors-usage.asciidoc +++ b/docs/reference/connector/docs/connectors-usage.asciidoc @@ -3,7 +3,7 @@ This document describes operations available to <> and <>, using the UI. -In the Kibana UI, go to *Search > Content > Connectors* to view a summary of all your connectors and sync jobs, and to create new connectors. 
+In the Kibana UI, navigate to *Search > Content > Connectors* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Here, you can view a summary of all your connectors and sync jobs, and to create new connectors. [TIP] ==== @@ -24,7 +24,7 @@ Once you've chosen the data source type you'd like to sync, you'll be prompted t View and manage all Elasticsearch indices managed by connectors. -In the {kib} UI, navigate to *Search > Content > Connectors* to view a list of connector indices and their attributes, including connector type health and ingestion status. +In the {kib} UI, navigate to *Search > Content > Connectors* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Here, you can view a list of connector indices and their attributes, including connector type health and ingestion status. Within this interface, you can choose to view the details for each existing index or delete an index. Or, you can <>. @@ -82,7 +82,7 @@ The workflow for these updates is as follows: After creating an index to be managed by a connector, you can configure automatic, recurring syncs. -In the {kib} UI, navigate to *Search > Content > Connectors*. +In the {kib} UI, navigate to *Search > Content > Connectors* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Choose the index to configure, and then choose the *Scheduling* tab. @@ -107,7 +107,7 @@ You may want to <> to see After creating the index to be managed by a connector, you can request a single sync at any time. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Then choose the index to sync. 
@@ -128,7 +128,7 @@ This operation requires access to Kibana and the `write` {ref}/security-privileg After a sync has started, you can cancel the sync before it completes. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Then choose the index with the running sync. @@ -144,7 +144,7 @@ This operation requires access to Kibana and the `write` {ref}/security-privileg View the index details to see a variety of information that communicate the status of the index and connector. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Then choose the index to view. @@ -192,7 +192,7 @@ This operation requires access to Kibana and the `read` {ref}/security-privilege View the documents the connector has synced from the data. Additionally view the index mappings to determine the current document schema. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Then choose the index to view. @@ -211,7 +211,7 @@ See <> for security details. Use <> to limit which documents are fetched from the data source, or limit which fetched documents are stored in Elastic. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. 
Then choose the index to manage and choose the *Sync rules* tab. @@ -220,6 +220,6 @@ Then choose the index to manage and choose the *Sync rules* tab. Use {ref}/ingest-pipeline-search.html[ingest pipelines] to transform fetched data before it is stored in Elastic. -In the {kib} UI, navigate to *Search > Content > Elasticsearch indices*. +In the {kib} UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Then choose the index to manage and choose the *Pipelines* tab. diff --git a/docs/reference/connector/docs/dls-e2e-guide.asciidoc b/docs/reference/connector/docs/dls-e2e-guide.asciidoc index 7f07fddd575c9..3670ed0730bc7 100644 --- a/docs/reference/connector/docs/dls-e2e-guide.asciidoc +++ b/docs/reference/connector/docs/dls-e2e-guide.asciidoc @@ -54,7 +54,7 @@ To build our search experience for our SharePoint Online data, we need to create Follow these steps to create a Search Application in the Kibana UI: -. Navigate to *Search > Search Applications*. +. Navigate to *Search > Search Applications* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. . Select *Create*. . *Name* the Search Application. . Select the *index* used by the SharePoint Online connector. diff --git a/docs/reference/connector/docs/postgresql-connector-client-tutorial.asciidoc b/docs/reference/connector/docs/postgresql-connector-client-tutorial.asciidoc index cf8aac9c689ca..3a3ab242a47aa 100644 --- a/docs/reference/connector/docs/postgresql-connector-client-tutorial.asciidoc +++ b/docs/reference/connector/docs/postgresql-connector-client-tutorial.asciidoc @@ -70,7 +70,7 @@ To complete this tutorial, you'll need to complete the following steps: Elastic connectors enable you to create searchable, read-only replicas of your data sources in Elasticsearch. 
The first step in setting up your self-managed connector is to create an index. -In the {kibana-ref}[Kibana^] UI go to *Search > Content > Elasticsearch indices*. +In the {kibana-ref}[Kibana^] UI, navigate to *Search > Content > Elasticsearch indices* from the main menu, or use the {kibana-ref}/kibana-concepts-analysts.html#_finding_your_apps_and_objects[global search field]. Create a new connector index: diff --git a/docs/reference/esql/esql-kibana.asciidoc b/docs/reference/esql/esql-kibana.asciidoc index 9850e012fc049..85969e19957af 100644 --- a/docs/reference/esql/esql-kibana.asciidoc +++ b/docs/reference/esql/esql-kibana.asciidoc @@ -9,9 +9,9 @@ You can use {esql} in {kib} to query and aggregate your data, create visualizations, and set up alerts. This guide shows you how to use {esql} in Kibana. To follow along with the -queries, load the "Sample web logs" sample data set by clicking *Try sample -data* from the {kib} Home, selecting *Other sample data sets*, and clicking *Add -data* on the *Sample web logs* card. +queries, load the "Sample web logs" sample data set by selecting **Sample Data** +from the **Integrations** page in {kib}, selecting *Other sample data sets*, +and clicking *Add data* on the *Sample web logs* card. [discrete] [[esql-kibana-enable]] @@ -30,9 +30,7 @@ However, users will be able to access existing {esql} artifacts like saved searc // tag::esql-mode[] To get started with {esql} in Discover, open the main menu and select -*Discover*. Next, from the Data views menu, select *Language: ES|QL*. - -image::images/esql/esql-data-view-menu.png[align="center",width=33%] +*Discover*. Next, select *Try ES|QL* from the application menu bar. // end::esql-mode[] [discrete] @@ -54,8 +52,9 @@ A source command can be followed by one or more <>. In this query, the processing command is <>. `LIMIT` limits the number of rows that are retrieved. 
-TIP: Click the help icon (image:images/esql/esql-icon-help.svg[Static,20]) to open the -in-product reference documentation for all commands and functions. +TIP: Click the **ES|QL help** button to open the +in-product reference documentation for all commands and functions or to get +recommended queries that will help you get started. // tag::autocomplete[] To make it easier to write queries, auto-complete offers suggestions with @@ -76,7 +75,7 @@ FROM kibana_sample_data_logs | LIMIT 10 ==== [discrete] -==== Expand the query bar +==== Make your query readable For readability, you can put each processing command on a new line. The following query is identical to the previous one: @@ -87,15 +86,12 @@ FROM kibana_sample_data_logs | LIMIT 10 ---- -// tag::compact[] -To make it easier to write multi-line queries, click the double-headed arrow -button (image:images/esql/esql-icon-expand-query-bar.svg[]) to expand the query -bar: +You can do that using the **Add line breaks on pipes** button from the query editor's footer. -image::images/esql/esql-expanded-query-bar.png[align="center"] +image::https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/bltd5554518309e10f6/672d153cfeb8f9d479ebcc6e/esql-line-breakdown.gif[Automatic line breaks for ES|QL queries] -To return to a compact query bar, click the minimize editor button -(image:images/esql/esql-icon-minimize-query-bar.svg[]). +// tag::compact[] +You can adjust the editor's height by dragging its bottom border to your liking. // end::compact[] [discrete] @@ -110,9 +106,7 @@ detailed warning, expand the query bar, and click *warnings*. ==== Query history You can reuse your recent {esql} queries in the query bar. -In the query bar click *Show recent queries*: - -image::images/esql/esql-discover-show-recent-query.png[align="center",size="50%"] +In the query bar click *Show recent queries*. 
You can then scroll through your recent queries: @@ -220,8 +214,9 @@ FROM kibana_sample_data_logs === Analyze and visualize data Between the query bar and the results table, Discover shows a date histogram -visualization. If the indices you're querying do not contain a `@timestamp` -field, the histogram is not shown. +visualization. By default, if the indices you're querying do not contain a `@timestamp` +field, the histogram is not shown. But you can use a custom time field with the `?_tstart` +and `?_tend` parameters to enable it. The visualization adapts to the query. A query's nature determines the type of visualization. For example, this query aggregates the total number of bytes per @@ -250,7 +245,7 @@ save button (image:images/esql/esql-icon-save-visualization.svg[]). Once saved to a dashboard, you'll be taken to the Dashboards page. You can continue to make changes to the visualization. Click the options button in the top-right (image:images/esql/esql-icon-options.svg[]) and -select *Edit ESQL visualization* to open the in-line editor: +select *Edit ES|QL visualization* to open the in-line editor: image::images/esql/esql-kibana-edit-on-dashboard.png[align="center",width=66%] diff --git a/docs/reference/esql/functions/description/bit_length.asciidoc b/docs/reference/esql/functions/description/bit_length.asciidoc new file mode 100644 index 0000000000000..1aad47488802d --- /dev/null +++ b/docs/reference/esql/functions/description/bit_length.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the bit length of a string. 
diff --git a/docs/reference/esql/functions/examples/bit_length.asciidoc b/docs/reference/esql/functions/examples/bit_length.asciidoc new file mode 100644 index 0000000000000..a99f6f664e79e --- /dev/null +++ b/docs/reference/esql/functions/examples/bit_length.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=bitLength] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=bitLength-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/docs/bit_length.md b/docs/reference/esql/functions/kibana/docs/bit_length.md new file mode 100644 index 0000000000000..22280febd7876 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/bit_length.md @@ -0,0 +1,12 @@ + + +### BIT_LENGTH +Returns the bit length of a string. + +``` +FROM employees +| KEEP first_name, last_name +| EVAL fn_bit_length = BIT_LENGTH(first_name) +``` diff --git a/docs/reference/esql/functions/layout/bit_length.asciidoc b/docs/reference/esql/functions/layout/bit_length.asciidoc new file mode 100644 index 0000000000000..00a7206f3ceda --- /dev/null +++ b/docs/reference/esql/functions/layout/bit_length.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-bit_length]] +=== `BIT_LENGTH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/bit_length.svg[Embedded,opts=inline] + +include::../parameters/bit_length.asciidoc[] +include::../description/bit_length.asciidoc[] +include::../types/bit_length.asciidoc[] +include::../examples/bit_length.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/bit_length.asciidoc b/docs/reference/esql/functions/parameters/bit_length.asciidoc new file mode 100644 index 0000000000000..7bb8c080ce4a1 --- /dev/null +++ b/docs/reference/esql/functions/parameters/bit_length.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +String expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/signature/bit_length.svg b/docs/reference/esql/functions/signature/bit_length.svg new file mode 100644 index 0000000000000..904dbbe25c9c2 --- /dev/null +++ b/docs/reference/esql/functions/signature/bit_length.svg @@ -0,0 +1 @@ +BIT_LENGTH(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index f5222330d579d..422860f0a7a1d 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -8,6 +8,7 @@ {esql} supports these string functions: // tag::string_list[] +* <> * <> * <> * <> @@ -30,6 +31,7 @@ * <> // end::string_list[] +include::layout/bit_length.asciidoc[] include::layout/concat.asciidoc[] include::layout/ends_with.asciidoc[] include::layout/from_base64.asciidoc[] diff --git a/docs/reference/esql/functions/types/bit_length.asciidoc b/docs/reference/esql/functions/types/bit_length.asciidoc new file mode 100644 index 0000000000000..db5a48c7c4390 --- /dev/null +++ b/docs/reference/esql/functions/types/bit_length.asciidoc @@ -0,0 
+1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | result +keyword | integer +text | integer +|=== diff --git a/docs/reference/images/esql/esql-dashboard-panel.png b/docs/reference/images/esql/esql-dashboard-panel.png index d621d1170edcf..61b44f7c9f857 100644 Binary files a/docs/reference/images/esql/esql-dashboard-panel.png and b/docs/reference/images/esql/esql-dashboard-panel.png differ diff --git a/docs/reference/images/esql/esql-discover-query-history.png b/docs/reference/images/esql/esql-discover-query-history.png index da31e4a6acce4..ff1d2ffa8b280 100644 Binary files a/docs/reference/images/esql/esql-discover-query-history.png and b/docs/reference/images/esql/esql-discover-query-history.png differ diff --git a/docs/reference/images/esql/esql-discover-show-recent-query.png b/docs/reference/images/esql/esql-discover-show-recent-query.png deleted file mode 100644 index 13c8df9965ea3..0000000000000 Binary files a/docs/reference/images/esql/esql-discover-show-recent-query.png and /dev/null differ diff --git a/docs/reference/images/esql/esql-kibana-auto-complete.png b/docs/reference/images/esql/esql-kibana-auto-complete.png index d50d6b133442f..155df2447dd6c 100644 Binary files a/docs/reference/images/esql/esql-kibana-auto-complete.png and b/docs/reference/images/esql/esql-kibana-auto-complete.png differ diff --git a/docs/reference/images/esql/esql-kibana-bar-chart.png b/docs/reference/images/esql/esql-kibana-bar-chart.png index a760d3d69920e..b74b33710d908 100644 Binary files a/docs/reference/images/esql/esql-kibana-bar-chart.png and b/docs/reference/images/esql/esql-kibana-bar-chart.png differ diff --git a/docs/reference/images/esql/esql-kibana-create-rule.png b/docs/reference/images/esql/esql-kibana-create-rule.png index c9fb14b0d2ee9..a763c6f366df0 100644 Binary files 
a/docs/reference/images/esql/esql-kibana-create-rule.png and b/docs/reference/images/esql/esql-kibana-create-rule.png differ diff --git a/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png b/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png index 14f6be81af7df..348b77150ed87 100644 Binary files a/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png and b/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png differ diff --git a/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png b/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png index 95a997ca2ac30..f2a0779be348a 100644 Binary files a/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png and b/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png differ diff --git a/docs/reference/images/esql/esql-kibana-in-line-editor.png b/docs/reference/images/esql/esql-kibana-in-line-editor.png index 7b7a11e532226..85631896e833f 100644 Binary files a/docs/reference/images/esql/esql-kibana-in-line-editor.png and b/docs/reference/images/esql/esql-kibana-in-line-editor.png differ diff --git a/docs/reference/ingest/apis/simulate-ingest.asciidoc b/docs/reference/ingest/apis/simulate-ingest.asciidoc index da591eed7546f..52ed09b1d32c2 100644 --- a/docs/reference/ingest/apis/simulate-ingest.asciidoc +++ b/docs/reference/ingest/apis/simulate-ingest.asciidoc @@ -265,8 +265,8 @@ Definition of a mapping that will be merged into the index's mapping for validat [[simulate-ingest-api-pre-existing-pipelines-ex]] ===== Use pre-existing pipeline definitions -In this example the index `index` has a default pipeline called `my-pipeline` and a final -pipeline called `my-final-pipeline`. Since both documents are being ingested into `index`, +In this example the index `my-index` has a default pipeline called `my-pipeline` and a final +pipeline called `my-final-pipeline`. 
Since both documents are being ingested into `my-index`, both pipelines are executed using the pipeline definitions that are already in the system. [source,console] diff --git a/docs/reference/modules/cluster/remote-clusters-migration.asciidoc b/docs/reference/modules/cluster/remote-clusters-migration.asciidoc index e205d7cb141fe..e84304ce9ef94 100644 --- a/docs/reference/modules/cluster/remote-clusters-migration.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-migration.asciidoc @@ -66,10 +66,11 @@ indices that were created from the auto-follow pattern. On the local cluster: . Enhance any roles used by local cluster users with the required -<> for {ccr} and {ccs}. +<> or +<> for {ccr} and {ccs}. Refer to <>. Note: -** You only need to assign additional `remote_indices` privileges to existing +** You only need to assign additional `remote_indices` or `remote_cluster` privileges to existing roles used for cross-cluster operations. You should be able to copy these privileges from the original roles on the remote cluster, where they are defined under the certification based security model. @@ -197,7 +198,7 @@ authentication. Resume any persistent tasks that you stopped earlier. Tasks should be restarted by the same user or API key that created the task before the migration. Ensure the roles of this user or API key have been updated with the required -`remote_indices` privileges. For users, tasks capture the caller's credentials +`remote_indices` or `remote_cluster` privileges. For users, tasks capture the caller's credentials when started and run in that user's security context. For API keys, restarting a task will update the task with the updated API key. @@ -246,7 +247,7 @@ If you need to roll back, follow these steps on the local cluster: . Remove the remote cluster definition by setting the remote cluster settings to `null`. -. Remove the `remote_indices` privileges from any roles that were updated during +. 
Remove the `remote_indices` or `remote_cluster` privileges from any roles that were updated during the migration. . On each node, remove the `remote_cluster_client.ssl.*` settings from diff --git a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc index df3c54794dc06..e21f93d81afc7 100644 --- a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc @@ -399,7 +399,7 @@ This does not show up in any logs. ====== Resolution -. Check that the local user has the necessary `remote_indices` privileges. Grant sufficient `remote_indices` privileges if necessary. +. Check that the local user has the necessary `remote_indices` or `remote_cluster` privileges. Grant sufficient `remote_indices` or `remote_cluster` privileges if necessary. . If permission is not an issue locally, ask the remote cluster administrator to create and distribute a <>. Replace the diff --git a/docs/reference/modules/indices/circuit_breaker.asciidoc b/docs/reference/modules/indices/circuit_breaker.asciidoc index 452d4e99704ce..13d81821c4f33 100644 --- a/docs/reference/modules/indices/circuit_breaker.asciidoc +++ b/docs/reference/modules/indices/circuit_breaker.asciidoc @@ -2,7 +2,16 @@ === Circuit breaker settings [[circuit-breaker-description]] // tag::circuit-breaker-description-tag[] -{es} contains multiple circuit breakers used to prevent operations from causing an OutOfMemoryError. Each breaker specifies a limit for how much memory it can use. Additionally, there is a parent-level breaker that specifies the total amount of memory that can be used across all breakers. +{es} contains multiple circuit breakers used to prevent operations from using an excessive amount of memory. Each breaker tracks the memory +used by certain operations and specifies a limit for how much memory it may track.
Additionally, there +is a parent-level breaker that specifies the total amount of memory that may be tracked across all breakers. + +When a circuit breaker reaches its limit, {es} will reject further operations. See <> for information about errors +raised by circuit breakers. + +Circuit breakers do not track all memory usage in {es} and therefore provide only incomplete protection against excessive memory usage. If +{es} uses too much memory then it may suffer from performance issues and nodes may even fail with an `OutOfMemoryError`. See +<> for help with troubleshooting high heap usage. Except where noted otherwise, these settings can be dynamically updated on a live cluster with the <> API. diff --git a/docs/reference/query-dsl/rule-query.asciidoc b/docs/reference/query-dsl/rule-query.asciidoc index dfedc2261bbde..43e79f656a55a 100644 --- a/docs/reference/query-dsl/rule-query.asciidoc +++ b/docs/reference/query-dsl/rule-query.asciidoc @@ -12,6 +12,12 @@ The old syntax using `rule_query` and `ruleset_id` is deprecated and will be removed in a future release, so it is strongly advised to migrate existing rule queries to the new API structure. ==== +[TIP] +==== +The rule query is not supported for use alongside reranking. +If you want to use query rules in conjunction with reranking, use the <> instead. +==== + Applies <> to the query before returning results. Query rules can be used to promote documents in the manner of a <> based on matching defined rules, or to identify specific documents to exclude from a contextual result set. If no matching query rules are defined, the "organic" matches for the query are returned. 
diff --git a/docs/reference/rest-api/security/bulk-create-roles.asciidoc b/docs/reference/rest-api/security/bulk-create-roles.asciidoc index e4b6ef7f765c2..a1fe998c08146 100644 --- a/docs/reference/rest-api/security/bulk-create-roles.asciidoc +++ b/docs/reference/rest-api/security/bulk-create-roles.asciidoc @@ -75,7 +75,7 @@ that begin with `_` are reserved for system usage. For more information, see <>. -`remote_indices`:: beta:[] (list) A list of remote indices permissions entries. +`remote_indices`:: (list) A list of remote indices permissions entries. + -- NOTE: Remote indices are effective for <>. @@ -94,6 +94,18 @@ have on the specified indices. read access to. A document within the specified indices must match this query in order for it to be accessible by the owners of the role. +`remote_cluster`:: (list) A list of remote cluster permissions entries. ++ +-- +NOTE: Remote cluster permissions are effective for <>. +They have no effect for remote clusters configured with the <>. +-- +`clusters` (required)::: (list) A list of cluster aliases to which the permissions +in this entry apply. +`privileges`(required)::: (list) The cluster level privileges that the owners of the role +have in the specified clusters. + + For more information, see <>. ==== diff --git a/docs/reference/rest-api/security/create-roles.asciidoc b/docs/reference/rest-api/security/create-roles.asciidoc index 75f1d7c799187..a1ab892330e67 100644 --- a/docs/reference/rest-api/security/create-roles.asciidoc +++ b/docs/reference/rest-api/security/create-roles.asciidoc @@ -96,6 +96,17 @@ have on the specified indices. read access to. A document within the specified indices must match this query in order for it to be accessible by the owners of the role. +`remote_cluster`:: (list) A list of remote cluster permissions entries. ++ +-- +NOTE: Remote cluster permissions are effective for <>. +They have no effect for remote clusters configured with the <>. 
+-- +`clusters` (required)::: (list) A list of cluster aliases to which the permissions +in this entry apply. +`privileges`(required)::: (list) The cluster level privileges that the owners of the role +have in the specified clusters. + For more information, see <>. [[security-api-put-role-example]] diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 9306d83c79136..0da75ac30d2dd 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -1,14 +1,12 @@ [[retriever]] === Retriever -A retriever is a specification to describe top documents returned from a -search. A retriever replaces other elements of the <> +A retriever is a specification to describe top documents returned from a search. +A retriever replaces other elements of the <> that also return top documents such as <> and -<>. A retriever may have child retrievers where a -retriever with two or more children is considered a compound retriever. This -allows for complex behavior to be depicted in a tree-like structure, called -the retriever tree, to better clarify the order of operations that occur -during a search. +<>. +A retriever may have child retrievers where a retriever with two or more children is considered a compound retriever. +This allows for complex behavior to be depicted in a tree-like structure, called the retriever tree, which clarifies the order of operations that occur during a search. [TIP] ==== @@ -29,6 +27,9 @@ A <> that produces top documents from <> that enhances search results by re-ranking documents based on semantic similarity to a specified inference text, using a machine learning model. +`rule`:: +A <> that applies contextual <> to pin or exclude documents for specific queries. + [[standard-retriever]] ==== Standard Retriever @@ -44,8 +45,7 @@ Defines a query to retrieve a set of top documents. 
`filter`:: (Optional, <>) + -Applies a <> to this retriever -where all documents must match this query but do not contribute to the score. +Applies a <> to this retriever, where all documents must match this query but do not contribute to the score. `search_after`:: (Optional, <>) @@ -56,14 +56,13 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] `sort`:: + -(Optional, <>) -A sort object that that specifies the order of matching documents. +(Optional, <>) A sort object that specifies the order of matching documents. `min_score`:: (Optional, `float`) + -Minimum <> for matching documents. Documents with a -lower `_score` are not included in the top documents. +Minimum <> for matching documents. +Documents with a lower `_score` are not included in the top documents. `collapse`:: (Optional, <>) @@ -72,8 +71,7 @@ Collapses the top documents by a specified key into a single top document per ke ===== Restrictions -When a retriever tree contains a compound retriever (a retriever with two or more child -retrievers) the <> parameter is not supported. +When a retriever tree contains a compound retriever (a retriever with two or more child retrievers) the <> parameter is not supported. 
[discrete] [[standard-retriever-example]] @@ -105,12 +103,39 @@ POST /restaurants/_bulk?refresh {"region": "Austria", "year": "2020", "vector": [10, 22, 79]} {"index":{}} {"region": "France", "year": "2020", "vector": [10, 22, 80]} + +PUT /movies + +PUT _query_rules/my-ruleset +{ + "rules": [ + { + "rule_id": "my-rule1", + "type": "pinned", + "criteria": [ + { + "type": "exact", + "metadata": "query_string", + "values": [ "pugs" ] + } + ], + "actions": { + "ids": [ + "id1" + ] + } + } + ] +} + ---- // TESTSETUP [source,console] -------------------------------------------------- DELETE /restaurants + +DELETE /movies -------------------------------------------------- // TEARDOWN //// @@ -143,11 +168,13 @@ GET /restaurants/_search } } ---- + <1> Opens the `retriever` object. <2> The `standard` retriever is used for defining traditional {es} queries. <3> The entry point for defining the search query. <4> The `bool` object allows for combining multiple query clauses logically. -<5> The `should` array indicates conditions under which a document will match. Documents matching these conditions will increase their relevancy score. +<5> The `should` array indicates conditions under which a document will match. +Documents matching these conditions will have increased relevancy scores. <6> The `match` object finds documents where the `region` field contains the word "Austria." <7> The `filter` array provides filtering conditions that must be met but do not contribute to the relevancy score. <8> The `term` object is used for exact matches, in this case, filtering documents by the `year` field. @@ -178,8 +205,8 @@ Defines a <> to build a query vector. `k`:: (Required, integer) + -Number of nearest neighbors to return as top hits. This value must be fewer than -or equal to `num_candidates`. +Number of nearest neighbors to return as top hits. +This value must be fewer than or equal to `num_candidates`. 
`num_candidates`:: (Required, integer) @@ -222,16 +249,15 @@ GET /restaurants/_search <1> Configuration for k-nearest neighbor (knn) search, which is based on vector similarity. <2> Specifies the field name that contains the vectors. <3> The query vector against which document vectors are compared in the `knn` search. -<4> The number of nearest neighbors to return as top hits. This value must be fewer than or equal to `num_candidates`. +<4> The number of nearest neighbors to return as top hits. +This value must be fewer than or equal to `num_candidates`. <5> The size of the initial candidate set from which the final `k` nearest neighbors are selected. [[rrf-retriever]] ==== RRF Retriever -An <> retriever returns top documents based on the RRF formula, -equally weighting two or more child retrievers. -Reciprocal rank fusion (RRF) is a method for combining multiple result -sets with different relevance indicators into a single result set. +An <> retriever returns top documents based on the RRF formula, equally weighting two or more child retrievers. +Reciprocal rank fusion (RRF) is a method for combining multiple result sets with different relevance indicators into a single result set. ===== Parameters @@ -357,7 +383,8 @@ Refer to <> for a high level overview of semantic re-ranking ===== Prerequisites To use `text_similarity_reranker` you must first set up a `rerank` task using the <>. -The `rerank` task should be set up with a machine learning model that can compute text similarity. Refer to {ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-similarity[the Elastic NLP model reference] for a list of third-party text similarity models supported by {es}. +The `rerank` task should be set up with a machine learning model that can compute text similarity. +Refer to {ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-similarity[the Elastic NLP model reference] for a list of third-party text similarity models supported by {es}. 
Currently you can: @@ -368,6 +395,7 @@ Currently you can: ** Refer to the <> on this page for a step-by-step guide. ===== Parameters + `retriever`:: (Required, <>) + The child retriever that generates the initial set of top documents to be re-ran `field`:: (Required, `string`) + -The document field to be used for text similarity comparisons. This field should contain the text that will be evaluated against the `inferenceText`. +The document field to be used for text similarity comparisons. +This field should contain the text that will be evaluated against the `inferenceText`. `inference_id`:: (Required, `string`) @@ -391,25 +420,28 @@ The text snippet used as the basis for similarity comparison. `rank_window_size`:: (Optional, `int`) + -The number of top documents to consider in the re-ranking process. Defaults to `10`. +The number of top documents to consider in the re-ranking process. +Defaults to `10`. `min_score`:: (Optional, `float`) + -Sets a minimum threshold score for including documents in the re-ranked results. Documents with similarity scores below this threshold will be excluded. Note that score calculations vary depending on the model used. +Sets a minimum threshold score for including documents in the re-ranked results. +Documents with similarity scores below this threshold will be excluded. +Note that score calculations vary depending on the model used. `filter`:: (Optional, <>) + Applies the specified <> to the child <>. -If the child retriever already specifies any filters, then this top-level filter is applied in conjuction -with the filter defined in the child retriever. +If the child retriever already specifies any filters, then this top-level filter is applied in conjunction with the filter defined in the child retriever. [discrete] [[text-similarity-reranker-retriever-example-cohere]] ==== Example: Cohere Rerank -This example enables out-of-the-box semantic search by re-ranking top documents using the Cohere Rerank API.
This approach eliminate the need to generate and store embeddings for all indexed documents. +This example enables out-of-the-box semantic search by re-ranking top documents using the Cohere Rerank API. +This approach eliminates the need to generate and store embeddings for all indexed documents. This requires a <> using the `rerank` task type. [source,console] @@ -459,7 +491,9 @@ Follow these steps to load the model and create a semantic re-ranker. python -m pip install eland[pytorch] ---- + -. Upload the model to {es} using Eland. This example assumes you have an Elastic Cloud deployment and an API key. Refer to the https://www.elastic.co/guide/en/elasticsearch/client/eland/current/machine-learning.html#ml-nlp-pytorch-auth[Eland documentation] for more authentication options. +. Upload the model to {es} using Eland. +This example assumes you have an Elastic Cloud deployment and an API key. +Refer to the https://www.elastic.co/guide/en/elasticsearch/client/eland/current/machine-learning.html#ml-nlp-pytorch-auth[Eland documentation] for more authentication options. + [source,sh] ---- @@ -517,14 +551,142 @@ POST movies/_search This retriever uses a standard `match` query to search the `movie` index for films tagged with the genre "drama". It then re-ranks the results based on semantic similarity to the text in the `inference_text` parameter, using the model we uploaded to {es}. +[[rule-retriever]] +==== Query Rules Retriever + +The `rule` retriever enables fine-grained control over search results by applying contextual <> to pin or exclude documents for specific queries. +This retriever has similar functionality to the <>, but works out of the box with other retrievers. + +===== Prerequisites + +To use the `rule` retriever you must first create one or more query rulesets using the <>. + +[discrete] +[[rule-retriever-parameters]] +===== Parameters + +`retriever`:: +(Required, <>) ++ +The child retriever that returns the results to apply query rules on top of. 
+This can be a standalone retriever such as the <> or <> retriever, or it can be a compound retriever. + +`ruleset_ids`:: +(Required, `array`) ++ +An array of one or more unique <> IDs with query-based rules to match and apply as applicable. +Rulesets and their associated rules are evaluated in the order in which they are specified in the query and ruleset. +The maximum number of rulesets to specify is 10. + +`match_criteria`:: +(Required, `object`) ++ +Defines the match criteria to apply to rules in the given query ruleset(s). +Match criteria should match the keys defined in the `criteria.metadata` field of the rule. + +`rank_window_size`:: +(Optional, `int`) ++ +The number of top documents to return from the `rule` retriever. +Defaults to `10`. + +[discrete] +[[rule-retriever-example]] +==== Example: Rule retriever + +This example shows the rule retriever executed without any additional retrievers. +It runs the query defined by the `retriever` and applies the rules from `my-ruleset` on top of the returned results. + +[source,console] +---- +GET movies/_search +{ + "retriever": { + "rule": { + "match_criteria": { + "query_string": "harry potter" + }, + "ruleset_ids": [ + "my-ruleset" + ], + "retriever": { + "standard": { + "query": { + "query_string": { + "query": "harry potter" + } + } + } + } + } + } +} +---- + +[discrete] +[[rule-retriever-example-rrf]] +==== Example: Rule retriever combined with RRF + +This example shows how to combine the `rule` retriever with other rerank retrievers such as <> or <>. + +[WARNING] +==== +The `rule` retriever will apply rules to any documents returned from its defined `retriever` or any of its sub-retrievers. +This means that for the best results, the `rule` retriever should be the outermost defined retriever. +Nesting a `rule` retriever as a sub-retriever under a reranker such as `rrf` or `text_similarity_reranker` may not produce the expected results. 
+==== + +[source,console] +---- +GET movies/_search +{ + "retriever": { + "rule": { <1> + "match_criteria": { + "query_string": "harry potter" + }, + "ruleset_ids": [ + "my-ruleset" + ], + "retriever": { + "rrf": { <2> + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "sorcerer's stone" + } + } + } + }, + { + "standard": { + "query": { + "query_string": { + "query": "chamber of secrets" + } + } + } + } + ] + } + } + } + } +} +---- + +<1> The `rule` retriever is the outermost retriever, applying rules to the search results that were previously reranked using the `rrf` retriever. +<2> The `rrf` retriever returns results from all of its sub-retrievers, and the output of the `rrf` retriever is used as input to the `rule` retriever. + ==== Using `from` and `size` with a retriever tree The <> and <> parameters are provided globally as part of the general -<>. They are applied to all retrievers in a -retriever tree unless a specific retriever overrides the `size` parameter -using a different parameter such as `rank_window_size`. Though, the final -search hits are always limited to `size`. +<>. +They are applied to all retrievers in a retriever tree, unless a specific retriever overrides the `size` parameter using a different parameter such as `rank_window_size`. +Though, the final search hits are always limited to `size`. ==== Using aggregations with a retriever tree @@ -534,8 +696,8 @@ clauses in a <>. ==== Restrictions on search parameters when specifying a retriever -When a retriever is specified as part of a search the following elements are not allowed -at the top-level and instead are only allowed as elements of specific retrievers: +When a retriever is specified as part of a search, the following elements are not allowed at the top-level. 
+Instead they are only allowed as elements of specific retrievers: * <> * <> @@ -543,3 +705,4 @@ at the top-level and instead are only allowed as elements of specific retrievers * <> * <> * <> + diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc index fc1c6f1a47299..377718e442aa0 100644 --- a/docs/reference/search/search-your-data/retrievers-overview.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -16,22 +16,21 @@ For implementation details, including notable restrictions, check out the Retrievers come in various types, each tailored for different search operations. The following retrievers are currently available: -* <>. Returns top documents from a -traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. -Mimics a traditional query but in the context of a retriever framework. This -ensures backward compatibility as existing `_search` requests remain supported. -That way you can transition to the new abstraction at your own pace without -mixing syntaxes. -* <>. Returns top documents from a <>, -in the context of a retriever framework. -* <>. Combines and ranks multiple first-stage retrievers using -the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets -with different relevance indicators into a single result set. -An RRF retriever is a *compound retriever*, where its `filter` element is -propagated to its sub retrievers. -+ - -* <>. Used for <>. +* <>. +Returns top documents from a traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. +Mimics a traditional query but in the context of a retriever framework. +This ensures backward compatibility as existing `_search` requests remain supported. +That way you can transition to the new abstraction at your own pace without mixing syntaxes. +* <>. 
+Returns top documents from a <>, in the context of a retriever framework. +* <>. +Combines and ranks multiple first-stage retrievers using the reciprocal rank fusion (RRF) algorithm. +Allows you to combine multiple result sets with different relevance indicators into a single result set. +An RRF retriever is a *compound retriever*, where its `filter` element is propagated to its sub retrievers. +* <>. +Applies <> to the query before returning results. +* <>. +Used for <>. Requires first creating a `rerank` task using the <>. [discrete] @@ -198,15 +197,18 @@ include::retrievers_examples.asciidoc[tag=rrf-retriever-with-aggs] Here are some important terms: -* *Retrieval Pipeline*. Defines the entire retrieval and ranking logic to -produce top hits. -* *Retriever Tree*. A hierarchical structure that defines how retrievers interact. -* *First-stage Retriever*. Returns an initial set of candidate documents. -* *Compound Retriever*. Builds on one or more retrievers, -enhancing document retrieval and ranking logic. -* *Combiners*. Compound retrievers that merge top hits -from multiple sub-retrievers. -* *Rerankers*. Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers. +* *Retrieval Pipeline*. +Defines the entire retrieval and ranking logic to produce top hits. +* *Retriever Tree*. +A hierarchical structure that defines how retrievers interact. +* *First-stage Retriever*. +Returns an initial set of candidate documents. +* *Compound Retriever*. +Builds on one or more retrievers, enhancing document retrieval and ranking logic. +* *Combiners*. +Compound retrievers that merge top hits from multiple sub-retrievers. +* *Rerankers*. +Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers. 
[discrete] [[retrievers-overview-play-in-search]] diff --git a/docs/reference/search/search-your-data/search-using-query-rules.asciidoc b/docs/reference/search/search-your-data/search-using-query-rules.asciidoc index 18be825d02376..7d9d14684beee 100644 --- a/docs/reference/search/search-your-data/search-using-query-rules.asciidoc +++ b/docs/reference/search/search-your-data/search-using-query-rules.asciidoc @@ -10,7 +10,7 @@ _Query rules_ allow customization of search results for queries that match speci This allows for more control over results, for example ensuring that promoted documents that match defined criteria are returned at the top of the result list. Metadata is defined in the query rule, and is matched against the query criteria. Query rules use metadata to match a query. -Metadata is provided as part of the <> as an object and can be anything that helps differentiate the query, for example: +Metadata is provided as part of the search request as an object and can be anything that helps differentiate the query, for example: * A user-entered query string * Personalized metadata about users (e.g. country, language, etc) @@ -18,13 +18,13 @@ Metadata is provided as part of the <> as an o * A referring site * etc. -Query rules define a metadata key that will be used to match the metadata provided in the <> with the criteria specified in the rule. +Query rules define a metadata key that will be used to match the metadata provided in the <> with the criteria specified in the rule. -When a query rule matches the <> metadata according to its defined criteria, the query rule action is applied to the underlying `organic` query. +When a query rule matches the rule metadata according to its defined criteria, the query rule action is applied to the underlying `organic` query. For example, a query rule could be defined to match a user-entered query string of `pugs` and a country `us` and promote adoptable shelter dogs if the rule query met both criteria. 
-Rules are defined using the <> and searched using the <>. +Rules are defined using the <> and searched using the <> or the <>. [discrete] [[query-rule-definition]] @@ -189,9 +189,11 @@ You can use the <> call to retrieve the ruleset you just crea [discrete] [[rule-query-search]] -==== Perform a rule query +==== Search using query rules + +Once you have defined one or more query rulesets, you can search using these rulesets using the <> or the <>. +Retrievers are the recommended way to use rule queries, as they will work out of the box with other reranking retrievers such as <>. -Once you have defined one or more query rulesets, you can search these rulesets using the <> query. Rulesets are evaluated in order, so rules in the first ruleset you specify will be applied before any subsequent rulesets. An example query for the `my-ruleset` defined above is: @@ -200,18 +202,22 @@ An example query for the `my-ruleset` defined above is: ---- GET /my-index-000001/_search { - "query": { + "retriever": { "rule": { - "organic": { - "query_string": { - "query": "puggles" + "retriever": { + "standard": { + "query": { + "query_string": { + "query": "puggles" + } + } } }, "match_criteria": { "query_string": "puggles", "user_country": "us" }, - "ruleset_ids": ["my-ruleset"] + "ruleset_ids": [ "my-ruleset" ] } } } @@ -227,3 +233,51 @@ In this case, the rules are applied in the following order: - Where the matching rule appears in the ruleset - If multiple documents are specified in a single rule, in the order they are specified - If a document is matched by both a `pinned` rule and an `exclude` rule, the `exclude` rule will take precedence + +You can specify reranking retrievers such as <> or <> in the rule query to apply query rules on already-reranked results. 
+Here is an example: + +[source,console] +---- +GET my-index-000001/_search +{ + "retriever": { + "rule": { + "match_criteria": { + "query_string": "puggles", + "user_country": "us" + }, + "ruleset_ids": [ + "my-ruleset" + ], + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "pugs" + } + } + } + }, + { + "standard": { + "query": { + "query_string": { + "query": "puggles" + } + } + } + } + ] + } + } + } + } +} +---- +// TEST[continued] + +This will apply pinned and excluded query rules on top of the content that was reranked by RRF. diff --git a/docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc b/docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc index 1d31c7b6b9345..9b51a58725f39 100644 --- a/docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc +++ b/docs/reference/security/authentication/remote-clusters-privileges-api-key.asciidoc @@ -2,7 +2,8 @@ === Configure roles and users To use a remote cluster for {ccr} or {ccs}, you need to create user roles with -<> on the local cluster. +<> or +<> on the local cluster. You can manage users and roles from Stack Management in {kib} by selecting *Security > Roles* from the side navigation. 
You can also use the @@ -80,7 +81,7 @@ POST /_security/role/remote-search "privileges": [ "read", "read_cross_cluster", - "view_index_metadata" + "view_index_metadata" ] } ] diff --git a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc index 7eb27d5428956..2222d09c26db2 100644 --- a/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc +++ b/docs/reference/troubleshooting/common-issues/disk-usage-exceeded.asciidoc @@ -106,6 +106,8 @@ As a long-term solution, we recommend you do one of the following best suited to your use case: * add nodes to the affected <> ++ +TIP: You should enable <> for clusters deployed using our {ess}, {ece}, and {eck} platforms. * upgrade existing nodes to increase disk space + diff --git a/docs/reference/vectors/vector-functions.asciidoc b/docs/reference/vectors/vector-functions.asciidoc index 2a80290cf9d3b..10dca8084e28a 100644 --- a/docs/reference/vectors/vector-functions.asciidoc +++ b/docs/reference/vectors/vector-functions.asciidoc @@ -16,7 +16,7 @@ This is the list of available vector functions and vector access methods: 6. <].vectorValue`>> – returns a vector's value as an array of floats 7. <].magnitude`>> – returns a vector's magnitude -NOTE: The `cosineSimilarity` and `dotProduct` functions are not supported for `bit` vectors. +NOTE: The `cosineSimilarity` function is not supported for `bit` vectors. NOTE: The recommended way to access dense vectors is through the `cosineSimilarity`, `dotProduct`, `l1norm` or `l2norm` functions. Please note @@ -332,6 +332,92 @@ When using `bit` vectors, not all the vector functions are available. The suppor * <> – calculates Hamming distance, the sum of the bitwise XOR of the two vectors * <> – calculates L^1^ distance, this is simply the `hamming` distance * <> - calculates L^2^ distance, this is the square root of the `hamming` distance +* <> – calculates dot product. 
When comparing two `bit` vectors, +this is the sum of the bitwise AND of the two vectors. If providing `float[]` or `byte[]`, which has `dims` number of elements, as a query vector, the `dotProduct` is +the sum of the floating point values using the stored `bit` vector as a mask. -Currently, the `cosineSimilarity` and `dotProduct` functions are not supported for `bit` vectors. +Here is an example of using dot-product with bit vectors. + +[source,console] +-------------------------------------------------- +PUT my-index-bit-vectors +{ + "mappings": { + "properties": { + "my_dense_vector": { + "type": "dense_vector", + "index": false, + "element_type": "bit", + "dims": 40 <1> + } + } + } +} + +PUT my-index-bit-vectors/_doc/1 +{ + "my_dense_vector": [8, 5, -15, 1, -7] <2> +} + +PUT my-index-bit-vectors/_doc/2 +{ + "my_dense_vector": [-1, 115, -3, 4, -128] +} + +PUT my-index-bit-vectors/_doc/3 +{ + "my_dense_vector": [2, 18, -5, 0, -124] +} + +POST my-index-bit-vectors/_refresh +-------------------------------------------------- +// TEST[continued] +<1> The number of dimensions or bits for the `bit` vector. +<2> This vector represents 5 bytes, or `5 * 8 = 40` bits, which equals the configured dimensions + +[source,console] +-------------------------------------------------- +GET my-index-bit-vectors/_search +{ + "query": { + "script_score": { + "query" : { + "match_all": {} + }, + "script": { + "source": "dotProduct(params.query_vector, 'my_dense_vector')", + "params": { + "query_vector": [8, 5, -15, 1, -7] <1> + } + } + } + } +} +-------------------------------------------------- +// TEST[continued] +<1> This vector is 40 bits, and thus will compute a bitwise `&` operation with the stored vectors.
+ +[source,console] +-------------------------------------------------- +GET my-index-bit-vectors/_search +{ + "query": { + "script_score": { + "query" : { + "match_all": {} + }, + "script": { + "source": "dotProduct(params.query_vector, 'my_dense_vector')", + "params": { + "query_vector": [0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12, 3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21, 4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89, -4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67] <1> + } + } + } + } +} +-------------------------------------------------- +// TEST[continued] +<1> This vector is 40 individual dimensions, and thus will sum the floating point values using the stored `bit` vector as a mask. + +Currently, the `cosineSimilarity` function is not supported for `bit` vectors. diff --git a/libs/core/src/main/java/module-info.java b/libs/core/src/main/java/module-info.java index 3c5d787ab8ba3..487ce69fad36b 100644 --- a/libs/core/src/main/java/module-info.java +++ b/libs/core/src/main/java/module-info.java @@ -19,7 +19,7 @@ to org.elasticsearch.xcontent, org.elasticsearch.nativeaccess, - org.elasticsearch.entitlement.agent; + org.elasticsearch.entitlement; uses ModuleQualifiedExportsService; } diff --git a/libs/entitlement/README.md b/libs/entitlement/README.md index 3e064705c3aef..76e4db0505d38 100644 --- a/libs/entitlement/README.md +++ b/libs/entitlement/README.md @@ -1,7 +1,7 @@ -### Entitlement runtime +### Entitlement library This module implements mechanisms to grant and check permissions under the _entitlements_ system. The entitlements system provides an alternative to the legacy `SecurityManager` system, which is deprecated for removal. -The `entitlement-agent` tool instruments sensitive class library methods with calls to this module, in order to enforce the controls. 
+The `entitlement-agent` instruments sensitive class library methods with calls to this module, in order to enforce the controls. diff --git a/libs/entitlement/agent/README.md b/libs/entitlement/agent/README.md index f8224f4266fb6..0dc331266cc94 100644 --- a/libs/entitlement/agent/README.md +++ b/libs/entitlement/agent/README.md @@ -5,6 +5,6 @@ This is a java agent that instruments sensitive class library methods with calls The entitlements system provides an alternative to the legacy `SecurityManager` system, which is deprecated for removal. With this agent, the Elasticsearch server can retain some control over which class library methods can be invoked by which callers. -This module is responsible for inserting the appropriate bytecode to achieve enforcement of the rules governed by the `entitlement-runtime` module. +This module is responsible for inserting the appropriate bytecode to achieve enforcement of the rules governed by the main `entitlement` module. -It is not responsible for permission granting or checking logic. That responsibility lies with `entitlement-runtime`. +It is not responsible for permission granting or checking logic. That responsibility lies with the main `entitlement` module. diff --git a/libs/entitlement/agent/build.gradle b/libs/entitlement/agent/build.gradle index 5b29ba40b5f25..5976c2f64af5f 100644 --- a/libs/entitlement/agent/build.gradle +++ b/libs/entitlement/agent/build.gradle @@ -6,51 +6,18 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ - -import static java.util.stream.Collectors.joining - apply plugin: 'elasticsearch.build' -apply plugin: 'elasticsearch.embedded-providers' - -embeddedProviders { - impl 'entitlement-agent', project(':libs:entitlement:agent:impl') -} - -configurations { - entitlementBridge -} dependencies { - entitlementBridge project(":libs:entitlement:bridge") compileOnly project(":libs:core") compileOnly project(":libs:entitlement") - testImplementation project(":test:framework") - testImplementation project(":libs:entitlement:bridge") - testImplementation project(":libs:entitlement:agent:impl") -} - -tasks.named('test').configure { - systemProperty "tests.security.manager", "false" - dependsOn('jar') - - // Register an argument provider to avoid eager resolution of configurations - jvmArgumentProviders.add(new CommandLineArgumentProvider() { - @Override - Iterable asArguments() { - return ["-javaagent:${tasks.jar.archiveFile.get()}", "-Des.entitlements.bridgeJar=${configurations.entitlementBridge.singleFile}"] - } - }) - - - // The Elasticsearch build plugin automatically adds all compileOnly deps as testImplementation. - // We must not add the bridge this way because it is also on the boot classpath, and that would lead to jar hell. 
- classpath -= files(configurations.entitlementBridge) + compileOnly project(":libs:entitlement:bridge") } tasks.named('jar').configure { manifest { attributes( - 'Premain-Class': 'org.elasticsearch.entitlement.agent.EntitlementAgent' + 'Agent-Class': 'org.elasticsearch.entitlement.agent.EntitlementAgent' , 'Can-Retransform-Classes': 'true' ) } diff --git a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java b/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java index acb11af97bb5b..d062ba1ed2465 100644 --- a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java +++ b/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java @@ -9,53 +9,41 @@ package org.elasticsearch.entitlement.agent; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.internal.provider.ProviderLocator; -import org.elasticsearch.entitlement.instrumentation.InstrumentationService; -import org.elasticsearch.entitlement.instrumentation.MethodKey; - -import java.io.IOException; import java.lang.instrument.Instrumentation; +import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.util.Map; -import java.util.Set; -import java.util.jar.JarFile; +/** + * A Java Agent that sets up the bytecode instrumentation for the entitlement system. + *

+ * Agents are loaded into the unnamed module, which makes module exports awkward. + * To work around this, we keep minimal code in the agent itself, and + * instead use reflection to call into the main entitlement library, + * which bootstraps by using {@link Module#addExports} to make a single {@code initialize} + * method available for us to call from here. + * That method does the rest. + */ public class EntitlementAgent { - public static void premain(String agentArgs, Instrumentation inst) throws Exception { - // Add the bridge library (the one with the entitlement checking interface) to the bootstrap classpath. - // We can't actually reference the classes here for real before this point because they won't resolve. - var bridgeJarName = System.getProperty("es.entitlements.bridgeJar"); - if (bridgeJarName == null) { - throw new IllegalArgumentException("System property es.entitlements.bridgeJar is required"); + public static void agentmain(String agentArgs, Instrumentation inst) { + final Class initClazz; + try { + initClazz = Class.forName("org.elasticsearch.entitlement.initialization.EntitlementInitialization"); + } catch (ClassNotFoundException e) { + throw new AssertionError("entitlement agent could not find EntitlementInitialization", e); } - addJarToBootstrapClassLoader(inst, bridgeJarName); - - Method targetMethod = System.class.getMethod("exit", int.class); - Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.api.EntitlementChecks") - .getMethod("checkSystemExit", Class.class, int.class); - Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod); - - inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true); - inst.retransformClasses(System.class); - } - @SuppressForbidden(reason = "The appendToBootstrapClassLoaderSearch method takes a JarFile") - private static void
addJarToBootstrapClassLoader(Instrumentation inst, String jarString) throws IOException { - inst.appendToBootstrapClassLoaderSearch(new JarFile(jarString)); - } + final Method initMethod; + try { + initMethod = initClazz.getMethod("initialize", Instrumentation.class); + } catch (NoSuchMethodException e) { + throw new AssertionError("EntitlementInitialization missing initialize method", e); + } - private static String internalName(Class c) { - return c.getName().replace('.', '/'); + try { + initMethod.invoke(null, inst); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new AssertionError("entitlement initialization failed", e); + } } - - private static final InstrumentationService INSTRUMENTER_FACTORY = (new ProviderLocator<>( - "entitlement-agent", - InstrumentationService.class, - "org.elasticsearch.entitlement.agent.impl", - Set.of("org.objectweb.nonexistent.asm") - )).get(); - - // private static final Logger LOGGER = LogManager.getLogger(EntitlementAgent.class); } diff --git a/libs/entitlement/agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java b/libs/entitlement/agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java deleted file mode 100644 index cf7991626029a..0000000000000 --- a/libs/entitlement/agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.entitlement.agent; - -import com.carrotsearch.randomizedtesting.annotations.SuppressForbidden; - -import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager; -import org.elasticsearch.entitlement.runtime.api.NotEntitledException; -import org.elasticsearch.entitlement.runtime.internals.EntitlementInternals; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; -import org.junit.After; - -/** - * This is an end-to-end test of the agent and entitlement runtime. - * It runs with the agent installed, and exhaustively tests every instrumented method - * to make sure it works with the entitlement granted and throws without it. - * The only exception is {@link System#exit}, where we can't that it works without - * terminating the JVM. - *

- * If you're trying to debug the instrumentation code, take a look at {@code InstrumenterTests}. - * That tests the bytecode portion without firing up an agent, which makes everything easier to troubleshoot. - *

- * See {@code build.gradle} for how we set the command line arguments for this test. - */ -@WithoutSecurityManager -public class EntitlementAgentTests extends ESTestCase { - - public static final ElasticsearchEntitlementManager ENTITLEMENT_MANAGER = ElasticsearchEntitlementManager.get(); - - @After - public void resetEverything() { - EntitlementInternals.reset(); - } - - /** - * We can't really check that this one passes because it will just exit the JVM. - */ - @SuppressForbidden("Specifically testing System.exit") - public void testSystemExitNotEntitled() { - ENTITLEMENT_MANAGER.activate(); - assertThrows(NotEntitledException.class, () -> System.exit(123)); - } - -} diff --git a/libs/entitlement/asm-provider/README.md b/libs/entitlement/asm-provider/README.md new file mode 100644 index 0000000000000..5343e56c2aa02 --- /dev/null +++ b/libs/entitlement/asm-provider/README.md @@ -0,0 +1,2 @@ +This module uses the ASM library to implement various things, including bytecode instrumentation. +It is loaded using the Embedded Provider Gradle plugin. 
diff --git a/libs/entitlement/agent/impl/build.gradle b/libs/entitlement/asm-provider/build.gradle similarity index 94% rename from libs/entitlement/agent/impl/build.gradle rename to libs/entitlement/asm-provider/build.gradle index e95f89612700d..5f968629fe557 100644 --- a/libs/entitlement/agent/impl/build.gradle +++ b/libs/entitlement/asm-provider/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.build' dependencies { - compileOnly project(':libs:entitlement:agent') + compileOnly project(':libs:entitlement') implementation 'org.ow2.asm:asm:9.7' testImplementation project(":test:framework") testImplementation project(":libs:entitlement:bridge") diff --git a/libs/entitlement/agent/impl/licenses/asm-LICENSE.txt b/libs/entitlement/asm-provider/licenses/asm-LICENSE.txt similarity index 100% rename from libs/entitlement/agent/impl/licenses/asm-LICENSE.txt rename to libs/entitlement/asm-provider/licenses/asm-LICENSE.txt diff --git a/libs/entitlement/agent/impl/licenses/asm-NOTICE.txt b/libs/entitlement/asm-provider/licenses/asm-NOTICE.txt similarity index 100% rename from libs/entitlement/agent/impl/licenses/asm-NOTICE.txt rename to libs/entitlement/asm-provider/licenses/asm-NOTICE.txt diff --git a/libs/entitlement/agent/impl/src/main/java/module-info.java b/libs/entitlement/asm-provider/src/main/java/module-info.java similarity index 88% rename from libs/entitlement/agent/impl/src/main/java/module-info.java rename to libs/entitlement/asm-provider/src/main/java/module-info.java index f47345ddfaee6..8cbeafc9013aa 100644 --- a/libs/entitlement/agent/impl/src/main/java/module-info.java +++ b/libs/entitlement/asm-provider/src/main/java/module-info.java @@ -10,9 +10,9 @@ import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.impl.InstrumentationServiceImpl; -module org.elasticsearch.entitlement.agent.impl { +module org.elasticsearch.entitlement.instrumentation { requires org.objectweb.asm; - 
requires org.elasticsearch.entitlement.agent; + requires org.elasticsearch.entitlement; provides InstrumentationService with InstrumentationServiceImpl; } diff --git a/libs/entitlement/agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java similarity index 100% rename from libs/entitlement/agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java rename to libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java diff --git a/libs/entitlement/agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java similarity index 95% rename from libs/entitlement/agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java rename to libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 7c2e1645ada83..63c9ccd80be70 100644 --- a/libs/entitlement/agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -174,7 +174,7 @@ private void addClassAnnotationIfNeeded() { } } - static class EntitlementMethodVisitor extends MethodVisitor { + class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final String instrumentedMethodDescriptor; private final Method instrumentationMethod; @@ -203,21 +203,15 @@ public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) { @Override public void visitCode() { - 
pushEntitlementChecksObject(); + pushEntitlementChecker(); pushCallerClass(); forwardIncomingArguments(); invokeInstrumentationMethod(); super.visitCode(); } - private void pushEntitlementChecksObject() { - mv.visitMethodInsn( - INVOKESTATIC, - "org/elasticsearch/entitlement/api/EntitlementProvider", - "checks", - "()Lorg/elasticsearch/entitlement/api/EntitlementChecks;", - false - ); + private void pushEntitlementChecker() { + InstrumenterImpl.this.pushEntitlementChecker(mv); } private void pushCallerClass() { @@ -276,7 +270,15 @@ private void invokeInstrumentationMethod() { } } - // private static final Logger LOGGER = LogManager.getLogger(Instrumenter.class); + protected void pushEntitlementChecker(MethodVisitor mv) { + mv.visitMethodInsn( + INVOKESTATIC, + "org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle", + "instance", + "()Lorg/elasticsearch/entitlement/bridge/EntitlementChecker;", + false + ); + } public record ClassFileInfo(String fileName, byte[] bytecodes) {} } diff --git a/libs/entitlement/agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService b/libs/entitlement/asm-provider/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService similarity index 100% rename from libs/entitlement/agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService rename to libs/entitlement/asm-provider/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService diff --git a/libs/entitlement/agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java similarity index 100% rename from libs/entitlement/agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java rename to 
libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/ASMUtils.java diff --git a/libs/entitlement/agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java similarity index 81% rename from libs/entitlement/agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java rename to libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index f05c7ccae62e6..9a57e199d4907 100644 --- a/libs/entitlement/agent/impl/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -10,13 +10,13 @@ package org.elasticsearch.entitlement.instrumentation.impl; import org.elasticsearch.common.Strings; -import org.elasticsearch.entitlement.api.EntitlementChecks; -import org.elasticsearch.entitlement.api.EntitlementProvider; +import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Type; import java.lang.reflect.InvocationTargetException; @@ -27,6 +27,7 @@ import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; import static org.hamcrest.Matchers.is; +import static org.objectweb.asm.Opcodes.INVOKESTATIC; /** * This tests {@link InstrumenterImpl} in isolation, without a java agent. 
@@ -37,13 +38,15 @@ public class InstrumenterTests extends ESTestCase { final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); - private static TestEntitlementManager getTestChecks() { - return (TestEntitlementManager) EntitlementProvider.checks(); + static volatile TestEntitlementChecker testChecker; + + public static TestEntitlementChecker getTestEntitlementChecker() { + return testChecker; } @Before public void initialize() { - getTestChecks().isActive = false; + testChecker = new TestEntitlementChecker(); } /** @@ -73,11 +76,13 @@ public static void anotherSystemExit(int status) { static final class TestException extends RuntimeException {} /** - * We're not testing the permission checking logic here. - * This is a trivial implementation of {@link EntitlementChecks} that just always throws, + * We're not testing the permission checking logic here; + * only that the instrumented methods are calling the correct check methods with the correct arguments. + * This is a trivial implementation of {@link EntitlementChecker} that just always throws, * just to demonstrate that the injected bytecodes succeed in calling these methods. + * It also asserts that the arguments are correct. */ - public static class TestEntitlementManager implements EntitlementChecks { + public static class TestEntitlementChecker implements EntitlementChecker { /** * This allows us to test that the instrumentation is correct in both cases: * if the check throws, and if it doesn't. 
@@ -116,12 +121,12 @@ public void testClassIsInstrumented() throws Exception { newBytecode ); - getTestChecks().isActive = false; + getTestEntitlementChecker().isActive = false; // Before checking is active, nothing should throw callStaticMethod(newClass, "systemExit", 123); - getTestChecks().isActive = true; + getTestEntitlementChecker().isActive = true; // After checking is activated, everything should throw assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); @@ -145,11 +150,11 @@ public void testClassIsNotInstrumentedTwice() throws Exception { instrumentedTwiceBytecode ); - getTestChecks().isActive = true; - getTestChecks().checkSystemExitCallCount = 0; + getTestEntitlementChecker().isActive = true; + getTestEntitlementChecker().checkSystemExitCallCount = 0; assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertThat(getTestChecks().checkSystemExitCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(1)); } public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { @@ -170,14 +175,14 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { instrumentedTwiceBytecode ); - getTestChecks().isActive = true; - getTestChecks().checkSystemExitCallCount = 0; + getTestEntitlementChecker().isActive = true; + getTestEntitlementChecker().checkSystemExitCallCount = 0; assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertThat(getTestChecks().checkSystemExitCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(1)); assertThrows(TestException.class, () -> callStaticMethod(newClass, "anotherSystemExit", 123)); - assertThat(getTestChecks().checkSystemExitCallCount, is(2)); + assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); } /** This test doesn't replace ClassToInstrument in-place but instead loads a separate @@ -187,7 +192,7 @@ 
public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { * is not what would happen when it's run by the agent. */ private InstrumenterImpl createInstrumenter(Class classToInstrument, String... methodNames) throws NoSuchMethodException { - Method v1 = EntitlementChecks.class.getMethod("checkSystemExit", Class.class, int.class); + Method v1 = EntitlementChecker.class.getMethod("checkSystemExit", Class.class, int.class); var methods = Arrays.stream(methodNames).map(name -> { try { return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); @@ -196,7 +201,23 @@ private InstrumenterImpl createInstrumenter(Class classToInstrument, String.. } }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); - return new InstrumenterImpl("_NEW", methods); + Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); + return new InstrumenterImpl("_NEW", methods) { + /** + * We're not testing the bridge library here. + * Just call our own getter instead. 
+ */ + @Override + protected void pushEntitlementChecker(MethodVisitor mv) { + mv.visitMethodInsn( + INVOKESTATIC, + Type.getInternalName(getter.getDeclaringClass()), + getter.getName(), + Type.getMethodDescriptor(getter), + false + ); + } + }; } /** diff --git a/libs/entitlement/agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks b/libs/entitlement/asm-provider/src/test/resources/META-INF/services/org.elasticsearch.entitlement.bridge.EntitlementChecker similarity index 94% rename from libs/entitlement/agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks rename to libs/entitlement/asm-provider/src/test/resources/META-INF/services/org.elasticsearch.entitlement.bridge.EntitlementChecker index 983585190b35a..172ac1d2ab30b 100644 --- a/libs/entitlement/agent/impl/src/test/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks +++ b/libs/entitlement/asm-provider/src/test/resources/META-INF/services/org.elasticsearch.entitlement.bridge.EntitlementChecker @@ -7,4 +7,4 @@ # License v3.0 only", or the "Server Side Public License, v 1". # -org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$TestEntitlementManager +org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$TestEntitlementChecker diff --git a/libs/entitlement/bridge/README.md b/libs/entitlement/bridge/README.md index 7204d1d2c98cf..8beea6b3fe894 100644 --- a/libs/entitlement/bridge/README.md +++ b/libs/entitlement/bridge/README.md @@ -1,11 +1,11 @@ ### Entitlement Bridge This is the code called directly from instrumented methods. -It's a minimal code stub that is loaded into the boot classloader by the entitlement agent +It's a minimal shim that is patched into the `java.base` module so that it is callable from the class library methods instrumented by the agent. 
-Its job is to forward the entitlement checks to the actual runtime library, +Its job is to forward the entitlement checks to the main library, which is loaded normally. It is not responsible for injecting the bytecode instrumentation (that's the agent) -nor for implementing the permission checks (that's the runtime library). +nor for implementing the permission checks (that's the main library). diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index 29969a8629648..dff5fac1e1c1f 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -9,8 +9,6 @@ apply plugin: 'elasticsearch.build' -dependencies { -} tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' diff --git a/libs/entitlement/bridge/src/main/java/module-info.java b/libs/entitlement/bridge/src/main/java/module-info.java index 7091ae34ce1e1..93292109a726e 100644 --- a/libs/entitlement/bridge/src/main/java/module-info.java +++ b/libs/entitlement/bridge/src/main/java/module-info.java @@ -7,8 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ +// This module-info is used just to satisfy your IDE. +// At build and run time, the bridge is patched into the java.base module. module org.elasticsearch.entitlement.bridge { - uses org.elasticsearch.entitlement.api.EntitlementChecks; - - exports org.elasticsearch.entitlement.api; + exports org.elasticsearch.entitlement.bridge; } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java deleted file mode 100644 index bc10adcd086e9..0000000000000 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.api; - -import java.util.List; -import java.util.ServiceLoader; - -public class EntitlementProvider { - private static final EntitlementChecks CHECKS = lookupEntitlementChecksImplementation(); - - public static EntitlementChecks checks() { - return CHECKS; - } - - private static EntitlementChecks lookupEntitlementChecksImplementation() { - List candidates = ServiceLoader.load(EntitlementChecks.class).stream().map(ServiceLoader.Provider::get).toList(); - if (candidates.isEmpty()) { - throw new IllegalStateException("No EntitlementChecks service"); - } else if (candidates.size() >= 2) { - throw new IllegalStateException( - "Multiple EntitlementChecks services: " + candidates.stream().map(e -> e.getClass().getSimpleName()).toList() - ); - } else { - return candidates.get(0); - } - } -} diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java similarity index 86% rename from libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java rename to libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index b45313eb018a7..5ebb7d00e26f5 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -7,8 +7,8 @@ * License v3.0 only", or the "Server Side Public 
License, v 1". */ -package org.elasticsearch.entitlement.api; +package org.elasticsearch.entitlement.bridge; -public interface EntitlementChecks { +public interface EntitlementChecker { void checkSystemExit(Class callerClass, int status); } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java new file mode 100644 index 0000000000000..2fe4a163a4136 --- /dev/null +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +/** + * Makes the {@link EntitlementChecker} available to injected bytecode. + */ +public class EntitlementCheckerHandle { + + /** + * This is how the bytecodes injected by our instrumentation access the {@link EntitlementChecker} + * so they can call the appropriate check method. + */ + public static EntitlementChecker instance() { + return Holder.instance; + } + + /** + * Having a separate inner {@code Holder} class ensures that the field is initialized + * the first time {@link #instance()} is called, rather than the first time anyone anywhere + * references the {@link EntitlementCheckerHandle} class. 
+ */ + private static class Holder { + /** + * The {@code EntitlementInitialization} class is what actually instantiates it and makes it available; + * here, we copy it into a static final variable for maximum performance. + */ + private static final EntitlementChecker instance; + static { + String initClazz = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; + final Class clazz; + try { + clazz = ClassLoader.getSystemClassLoader().loadClass(initClazz); + } catch (ClassNotFoundException e) { + throw new AssertionError("java.base cannot find entitlement initialization", e); + } + final Method checkerMethod; + try { + checkerMethod = clazz.getMethod("checker"); + } catch (NoSuchMethodException e) { + throw new AssertionError("EntitlementInitialization is missing checker() method", e); + } + try { + instance = (EntitlementChecker) checkerMethod.invoke(null); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + } + + // no construction + private EntitlementCheckerHandle() {} +} diff --git a/libs/entitlement/build.gradle b/libs/entitlement/build.gradle index 712cf358f5883..12e0bb48a54b7 100644 --- a/libs/entitlement/build.gradle +++ b/libs/entitlement/build.gradle @@ -9,12 +9,20 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' +apply plugin: 'elasticsearch.embedded-providers' + +embeddedProviders { + impl 'entitlement', project(':libs:entitlement:asm-provider') +} + dependencies { compileOnly project(':libs:core') // For @SuppressForbidden + compileOnly project(':libs:logging') compileOnly project(":libs:x-content") // for parsing policy files - compileOnly project(':server') // To access the main server module for special permission checks compileOnly project(':libs:entitlement:bridge') - testImplementation project(":test:framework") + testImplementation(project(":test:framework")) { + exclude group: 'org.elasticsearch', module: 'entitlement' + } }
tasks.named('forbiddenApisMain').configure { diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index 12e6905014512..54075ba60bbef 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -7,14 +7,19 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -module org.elasticsearch.entitlement.runtime { - requires org.elasticsearch.entitlement.bridge; +module org.elasticsearch.entitlement { requires org.elasticsearch.xcontent; - requires org.elasticsearch.server; + requires org.elasticsearch.logging; + requires java.instrument; + requires org.elasticsearch.base; + requires jdk.attach; + + requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base exports org.elasticsearch.entitlement.runtime.api; + exports org.elasticsearch.entitlement.instrumentation; + exports org.elasticsearch.entitlement.bootstrap to org.elasticsearch.server; + exports org.elasticsearch.entitlement.initialization to java.base; - provides org.elasticsearch.entitlement.api.EntitlementChecks - with - org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager; + uses org.elasticsearch.entitlement.instrumentation.InstrumentationService; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java new file mode 100644 index 0000000000000..7f68457baea9e --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bootstrap; + +import com.sun.tools.attach.AgentInitializationException; +import com.sun.tools.attach.AgentLoadException; +import com.sun.tools.attach.AttachNotSupportedException; +import com.sun.tools.attach.VirtualMachine; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.initialization.EntitlementInitialization; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class EntitlementBootstrap { + + /** + * Activates entitlement checking. Once this method returns, calls to forbidden methods + * will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. 
+ */ + public static void bootstrap() { + logger.debug("Loading entitlement agent"); + exportInitializationToAgent(); + loadAgent(findAgentJar()); + } + + @SuppressForbidden(reason = "The VirtualMachine API is the only way to attach a java agent dynamically") + private static void loadAgent(String agentPath) { + try { + VirtualMachine vm = VirtualMachine.attach(Long.toString(ProcessHandle.current().pid())); + try { + vm.loadAgent(agentPath); + } finally { + vm.detach(); + } + } catch (AttachNotSupportedException | IOException | AgentLoadException | AgentInitializationException e) { + throw new IllegalStateException("Unable to attach entitlement agent", e); + } + } + + private static void exportInitializationToAgent() { + String initPkg = EntitlementInitialization.class.getPackageName(); + // agent will live in unnamed module + Module unnamedModule = ClassLoader.getSystemClassLoader().getUnnamedModule(); + EntitlementInitialization.class.getModule().addExports(initPkg, unnamedModule); + } + + private static String findAgentJar() { + String propertyName = "es.entitlement.agentJar"; + String propertyValue = System.getProperty(propertyName); + if (propertyValue != null) { + return propertyValue; + } + + Path dir = Path.of("lib", "entitlement-agent"); + if (Files.exists(dir) == false) { + throw new IllegalStateException("Directory for entitlement jar does not exist: " + dir); + } + try (var s = Files.list(dir)) { + var candidates = s.limit(2).toList(); + if (candidates.size() != 1) { + throw new IllegalStateException("Expected one jar in " + dir + "; found " + candidates.size()); + } + return candidates.get(0).toString(); + } catch (IOException e) { + throw new IllegalStateException("Failed to list entitlement jars in: " + dir, e); + } + } + + private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java 
b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java new file mode 100644 index 0000000000000..155d5a27c606b --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.initialization; + +import org.elasticsearch.core.internal.provider.ProviderLocator; +import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.entitlement.instrumentation.Transformer; +import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; + +import java.lang.instrument.Instrumentation; +import java.lang.reflect.Method; +import java.util.Map; +import java.util.Set; + +/** + * Called by the agent during {@code agentmain} to configure the entitlement system, + * instantiate and configure an {@link EntitlementChecker}, + * make it available to the bootstrap library via {@link #checker()}, + * and then install the {@link org.elasticsearch.entitlement.instrumentation.Instrumenter} + * to begin injecting our instrumentation. 
+ */ +public class EntitlementInitialization { + private static ElasticsearchEntitlementChecker manager; + + // Note: referenced by bridge reflectively + public static EntitlementChecker checker() { + return manager; + } + + // Note: referenced by agent reflectively + public static void initialize(Instrumentation inst) throws Exception { + manager = new ElasticsearchEntitlementChecker(); + + // TODO: Configure actual entitlement grants instead of this hardcoded one + Method targetMethod = System.class.getMethod("exit", int.class); + Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.bridge.EntitlementChecker") + .getMethod("checkSystemExit", Class.class, int.class); + Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod); + + inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true); + inst.retransformClasses(System.class); + } + + private static String internalName(Class c) { + return c.getName().replace('.', '/'); + } + + private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>( + "entitlement", + InstrumentationService.class, + "org.elasticsearch.entitlement.instrumentation", + Set.of() + ).get(); +} diff --git a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java similarity index 100% rename from libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java diff --git a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java 
b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java similarity index 100% rename from libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java diff --git a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java similarity index 100% rename from libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java diff --git a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/Transformer.java similarity index 93% rename from libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/Transformer.java index bd9bb5bf2e5c6..c6512ee975dbf 100644 --- a/libs/entitlement/agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/Transformer.java @@ -7,9 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.entitlement.agent; - -import org.elasticsearch.entitlement.instrumentation.Instrumenter; +package org.elasticsearch.entitlement.instrumentation; import java.lang.instrument.ClassFileTransformer; import java.security.ProtectionDomain; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java similarity index 69% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index a80d412f5dbd7..330205997d21c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -9,25 +9,21 @@ package org.elasticsearch.entitlement.runtime.api; -import org.elasticsearch.entitlement.api.EntitlementChecks; -import org.elasticsearch.entitlement.api.EntitlementProvider; +import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import java.util.Optional; import static org.elasticsearch.entitlement.runtime.internals.EntitlementInternals.isActive; /** - * Implementation of the {@link EntitlementChecks} interface, providing additional + * Implementation of the {@link EntitlementChecker} interface, providing additional * API methods for managing the checks. * The trampoline module loads this object via SPI. */ -public class ElasticsearchEntitlementManager implements EntitlementChecks { - /** - * @return the same instance of {@link ElasticsearchEntitlementManager} returned by {@link EntitlementProvider}. 
- */ - public static ElasticsearchEntitlementManager get() { - return (ElasticsearchEntitlementManager) EntitlementProvider.checks(); - } +public class ElasticsearchEntitlementChecker implements EntitlementChecker { + private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); /** * Causes entitlements to be enforced. @@ -40,7 +36,6 @@ public void activate() { public void checkSystemExit(Class callerClass, int status) { var requestingModule = requestingModule(callerClass); if (isTriviallyAllowed(requestingModule)) { - // System.out.println(" - Trivially allowed"); return; } // Hard-forbidden until we develop the permission granting scheme @@ -71,7 +66,20 @@ private static Module requestingModule(Class callerClass) { } private static boolean isTriviallyAllowed(Module requestingModule) { - return isActive == false || (requestingModule == null) || requestingModule == System.class.getModule(); + if (isActive == false) { + logger.debug("Trivially allowed: entitlements are inactive"); + return true; + } + if (requestingModule == null) { + logger.debug("Trivially allowed: Entire call stack is in the boot module layer"); + return true; + } + if (requestingModule == System.class.getModule()) { + logger.debug("Trivially allowed: Caller is in {}", System.class.getModule().getName()); + return true; + } + logger.trace("Not trivially allowed"); + return false; } } diff --git a/libs/entitlement/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks b/libs/entitlement/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks deleted file mode 100644 index 5865e43e2b85a..0000000000000 --- a/libs/entitlement/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks +++ /dev/null @@ -1,10 +0,0 @@ -# - # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - # or more contributor license agreements. 
Licensed under the "Elastic License - # 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - # Public License v 1"; you may not use this file except in compliance with, at - # your election, the "Elastic License 2.0", the "GNU Affero General Public - # License v3.0 only", or the "Server Side Public License, v 1". -# - -org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager diff --git a/libs/entitlement/tools/build.gradle b/libs/entitlement/tools/build.gradle new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/entitlement/agent/src/main/java/module-info.java b/libs/entitlement/tools/common/build.gradle similarity index 57% rename from libs/entitlement/agent/src/main/java/module-info.java rename to libs/entitlement/tools/common/build.gradle index 0eb87aeee3f6c..3373a8f747430 100644 --- a/libs/entitlement/agent/src/main/java/module-info.java +++ b/libs/entitlement/tools/common/build.gradle @@ -7,13 +7,9 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.entitlement.instrumentation.InstrumentationService; - -module org.elasticsearch.entitlement.agent { - requires java.instrument; - requires org.elasticsearch.base; // for @SuppressForbidden +plugins { + id 'java' +} - exports org.elasticsearch.entitlement.instrumentation to org.elasticsearch.entitlement.agent.impl; +group = 'org.elasticsearch.entitlement.tools' - uses InstrumentationService; -} diff --git a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java new file mode 100644 index 0000000000000..c72e550a529cd --- /dev/null +++ b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.tools; + +import java.io.IOException; +import java.lang.module.ModuleDescriptor; +import java.nio.file.FileSystem; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +public class Utils { + + public static Map> findModuleExports(FileSystem fs) throws IOException { + var modulesExports = new HashMap>(); + try (var stream = Files.walk(fs.getPath("modules"))) { + stream.filter(p -> p.getFileName().toString().equals("module-info.class")).forEach(x -> { + try (var is = Files.newInputStream(x)) { + var md = ModuleDescriptor.read(is); + modulesExports.put( + md.name(), + md.exports() + .stream() + .filter(e -> e.isQualified() == false) + .map(ModuleDescriptor.Exports::source) + .collect(Collectors.toSet()) + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + return modulesExports; + } + +} diff --git a/libs/entitlement/tools/securitymanager-scanner/build.gradle b/libs/entitlement/tools/securitymanager-scanner/build.gradle new file mode 100644 index 0000000000000..8d035c9e847c6 --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/build.gradle @@ -0,0 +1,61 @@ +plugins { + id 'application' +} + +apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.publish' + +tasks.named("dependencyLicenses").configure { + mapping from: /asm-.*/, to: 'asm' +} + +group = 'org.elasticsearch.entitlement.tools' + +ext { + javaMainClass = "org.elasticsearch.entitlement.tools.securitymanager.scanner.Main" +} + +application { + mainClass.set(javaMainClass) + 
applicationDefaultJvmArgs = [ + '--add-exports', 'java.base/sun.security.util=ALL-UNNAMED', + '--add-opens', 'java.base/java.lang=ALL-UNNAMED', + '--add-opens', 'java.base/java.net=ALL-UNNAMED', + '--add-opens', 'java.base/java.net.spi=ALL-UNNAMED', + '--add-opens', 'java.base/java.util.concurrent=ALL-UNNAMED', + '--add-opens', 'java.base/javax.crypto=ALL-UNNAMED', + '--add-opens', 'java.base/javax.security.auth=ALL-UNNAMED', + '--add-opens', 'java.base/jdk.internal.logger=ALL-UNNAMED', + '--add-opens', 'java.base/sun.nio.ch=ALL-UNNAMED', + '--add-opens', 'jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED', + '--add-opens', 'java.logging/java.util.logging=ALL-UNNAMED', + '--add-opens', 'java.logging/sun.util.logging.internal=ALL-UNNAMED', + '--add-opens', 'java.naming/javax.naming.ldap.spi=ALL-UNNAMED', + '--add-opens', 'java.rmi/sun.rmi.runtime=ALL-UNNAMED', + '--add-opens', 'jdk.dynalink/jdk.dynalink=ALL-UNNAMED', + '--add-opens', 'jdk.dynalink/jdk.dynalink.linker=ALL-UNNAMED', + '--add-opens', 'java.desktop/sun.awt=ALL-UNNAMED', + '--add-opens', 'java.sql.rowset/javax.sql.rowset.spi=ALL-UNNAMED', + '--add-opens', 'java.sql/java.sql=ALL-UNNAMED', + '--add-opens', 'java.xml.crypto/com.sun.org.apache.xml.internal.security.utils=ALL-UNNAMED' + ] +} + +repositories { + mavenCentral() +} + +dependencies { + compileOnly(project(':libs:core')) + implementation 'org.ow2.asm:asm:9.7' + implementation 'org.ow2.asm:asm-util:9.7' + implementation(project(':libs:entitlement:tools:common')) +} + +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} + +tasks.named("thirdPartyAudit").configure { + ignoreMissingClasses() +} diff --git a/libs/entitlement/tools/securitymanager-scanner/licenses/asm-LICENSE.txt b/libs/entitlement/tools/securitymanager-scanner/licenses/asm-LICENSE.txt new file mode 100644 index 0000000000000..afb064f2f2666 --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/licenses/asm-LICENSE.txt @@ -0,0 +1,26 @@ 
+Copyright (c) 2012 France Télécom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/libs/entitlement/tools/securitymanager-scanner/licenses/asm-NOTICE.txt b/libs/entitlement/tools/securitymanager-scanner/licenses/asm-NOTICE.txt new file mode 100644 index 0000000000000..8d1c8b69c3fce --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/licenses/asm-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/libs/entitlement/tools/securitymanager-scanner/src/README.md b/libs/entitlement/tools/securitymanager-scanner/src/README.md new file mode 100644 index 0000000000000..c01ba1387d1c8 --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/src/README.md @@ -0,0 +1,47 @@ +This tool scans the JDK on which it is running, looking for any location where `SecurityManager` is currently used, thus giving us a list of "entry points" inside the JDK where security checks are currently happening. + +More in detail, the tool scans for calls to any `SecurityManager` method starting with `check` (e.g. `checkWrite`). The tool treats the generic `checkPermission` method a little bit differently: `checkPermission` accepts a generic `Permission` object, it tries to read the permission type and permission name to give more information about it, trying to match two patterns that are used frequently inside the JDK: + +Pattern 1: private static permission field + +```java +private static final RuntimePermission INET_ADDRESS_RESOLVER_PERMISSION = +new RuntimePermission("inetAddressResolverProvider"); +... +sm.checkPermission(INET_ADDRESS_RESOLVER_PERMISSION); +``` +Pattern 2: direct object creation + +```java +sm.checkPermission(new LinkPermission("symbolic")); +``` + +The tool will recognize this pattern, and report the permission type and name alongside the `checkPermission` entry point (type `RuntimePermission` and name `inetAddressResolverProvider` in the first case, type `LinkPermission` and name `symbolic` in the second). 
+ +This allows us to give more information (either a specific type like `LinkPermission`, or a specific name like `inetAddressResolverProvider`) to generic `checkPermission` to help in deciding how to classify the permission check. The 2 patterns work quite well and cover roughly 90% of the cases. + +In order to run the tool, use: +```shell +./gradlew :libs:entitlement:tools:securitymanager-scanner:run +``` +The output of the tool is a CSV file, with one line for each entry-point, columns separated by `TAB` + +The columns are: +1. Module name +2. File name (from source root) +3. Line number +4. Fully qualified class name (ASM style, with `/` separators) +5. Method name +6. Method descriptor (ASM signature) +7. Visibility (PUBLIC/PUBLIC-METHOD/PRIVATE) +8. Check detail 1 (method name, or in case of checkPermission, permission name. Might be `MISSING`) +9. Check detail 2 (in case of checkPermission, the argument type (`Permission` subtype). Might be `MISSING`) + +Examples: +``` +java.base sun/nio/ch/DatagramChannelImpl.java 1360 sun/nio/ch/DatagramChannelImpl connect (Ljava/net/SocketAddress;Z)Ljava/nio/channels/DatagramChannel; PRIVATE checkConnect +``` +or +``` +java.base java/net/ResponseCache.java 118 java/net/ResponseCache setDefault (Ljava/net/ResponseCache;)V PUBLIC setResponseCache java/net/NetPermission +``` diff --git a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java new file mode 100644 index 0000000000000..bea49e0296e67 --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.tools.securitymanager.scanner; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.tools.Utils; +import org.objectweb.asm.ClassReader; + +import java.io.IOException; +import java.net.URI; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.List; +import java.util.Set; + +public class Main { + + static final Set excludedModules = Set.of("java.desktop"); + + private static void identifySMChecksEntryPoints() throws IOException { + + FileSystem fs = FileSystems.getFileSystem(URI.create("jrt:/")); + + var moduleExports = Utils.findModuleExports(fs); + + var callers = new HashMap>(); + var visitor = new SecurityCheckClassVisitor(callers); + + try (var stream = Files.walk(fs.getPath("modules"))) { + stream.filter(x -> x.toString().endsWith(".class")).forEach(x -> { + var moduleName = x.subpath(1, 2).toString(); + if (excludedModules.contains(moduleName) == false) { + try { + ClassReader cr = new ClassReader(Files.newInputStream(x)); + visitor.setCurrentModule(moduleName, moduleExports.get(moduleName)); + var path = x.getNameCount() > 3 ? 
x.subpath(2, x.getNameCount() - 1).toString() : ""; + visitor.setCurrentSourcePath(path); + cr.accept(visitor, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }); + } + + printToStdout(callers); + } + + @SuppressForbidden(reason = "This simple tool just prints to System.out") + private static void printToStdout(HashMap> callers) { + for (var kv : callers.entrySet()) { + for (var e : kv.getValue()) { + System.out.println(toString(kv.getKey(), e)); + } + } + } + + private static final String SEPARATOR = "\t"; + + private static String toString(String calleeName, SecurityCheckClassVisitor.CallerInfo callerInfo) { + var s = callerInfo.moduleName() + SEPARATOR + callerInfo.source() + SEPARATOR + callerInfo.line() + SEPARATOR + callerInfo + .className() + SEPARATOR + callerInfo.methodName() + SEPARATOR + callerInfo.methodDescriptor() + SEPARATOR; + + if (callerInfo.externalAccess().contains(SecurityCheckClassVisitor.ExternalAccess.METHOD) + && callerInfo.externalAccess().contains(SecurityCheckClassVisitor.ExternalAccess.CLASS)) { + s += "PUBLIC"; + } else if (callerInfo.externalAccess().contains(SecurityCheckClassVisitor.ExternalAccess.METHOD)) { + s += "PUBLIC-METHOD"; + } else { + s += "PRIVATE"; + } + + if (callerInfo.runtimePermissionType() != null) { + s += SEPARATOR + callerInfo.runtimePermissionType(); + } else if (calleeName.equals("checkPermission")) { + s += SEPARATOR + "MISSING"; // missing information + } else { + s += SEPARATOR + calleeName; + } + + if (callerInfo.permissionType() != null) { + s += SEPARATOR + callerInfo.permissionType(); + } else if (calleeName.equals("checkPermission")) { + s += SEPARATOR + "MISSING"; // missing information + } else { + s += SEPARATOR; + } + return s; + } + + public static void main(String[] args) throws IOException { + identifySMChecksEntryPoints(); + } +} diff --git 
a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/SecurityCheckClassVisitor.java b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/SecurityCheckClassVisitor.java new file mode 100644 index 0000000000000..a75fd5fc685f1 --- /dev/null +++ b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/SecurityCheckClassVisitor.java @@ -0,0 +1,279 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.tools.securitymanager.scanner; + +import org.elasticsearch.core.SuppressForbidden; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Label; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Type; + +import java.lang.constant.ClassDesc; +import java.lang.reflect.InaccessibleObjectException; +import java.lang.reflect.Modifier; +import java.nio.file.Path; +import java.security.Permission; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.objectweb.asm.Opcodes.ACC_PUBLIC; +import static org.objectweb.asm.Opcodes.ASM9; +import static org.objectweb.asm.Opcodes.GETSTATIC; +import static org.objectweb.asm.Opcodes.INVOKEDYNAMIC; +import static org.objectweb.asm.Opcodes.INVOKEINTERFACE; +import static org.objectweb.asm.Opcodes.INVOKESPECIAL; +import static org.objectweb.asm.Opcodes.INVOKESTATIC; +import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL; +import static org.objectweb.asm.Opcodes.NEW; + +class SecurityCheckClassVisitor extends ClassVisitor { + + static final String SECURITY_MANAGER_INTERNAL_NAME = "java/lang/SecurityManager"; + static final Set excludedClasses = Set.of(SECURITY_MANAGER_INTERNAL_NAME); + + enum ExternalAccess { + CLASS, + METHOD + } + + record CallerInfo( + String moduleName, + String source, + int line, + String className, + String methodName, + String methodDescriptor, + EnumSet externalAccess, + String permissionType, + String runtimePermissionType + ) {} + + private final Map> callerInfoByMethod; + private String className; + private int classAccess; + private String source; + private String moduleName; + private String sourcePath; + private Set moduleExports; + + protected SecurityCheckClassVisitor(Map> callerInfoByMethod) { + super(ASM9); + this.callerInfoByMethod = callerInfoByMethod; + } + + @Override + public void visit(int version, int 
access, String name, String signature, String superName, String[] interfaces) { + super.visit(version, access, name, signature, superName, interfaces); + this.className = name; + this.classAccess = access; + } + + @Override + public void visitSource(String source, String debug) { + super.visitSource(source, debug); + this.source = source; + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + if (excludedClasses.contains(this.className)) { + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + return new SecurityCheckMethodVisitor(super.visitMethod(access, name, descriptor, signature, exceptions), name, access, descriptor); + } + + public void setCurrentModule(String moduleName, Set moduleExports) { + this.moduleName = moduleName; + this.moduleExports = moduleExports; + } + + public void setCurrentSourcePath(String path) { + this.sourcePath = path; + } + + private class SecurityCheckMethodVisitor extends MethodVisitor { + + private final String methodName; + private final String methodDescriptor; + private int line; + private String permissionType; + private String runtimePermissionType; + private final int methodAccess; + + protected SecurityCheckMethodVisitor(MethodVisitor mv, String methodName, int methodAccess, String methodDescriptor) { + super(ASM9, mv); + this.methodName = methodName; + this.methodAccess = methodAccess; + this.methodDescriptor = methodDescriptor; + } + + private static final Set KNOWN_PERMISSIONS = Set.of("jdk.vm.ci.services.JVMCIPermission"); + + @SuppressForbidden(reason = "System.err is OK for this simple command-line tool") + private void handleException(String className, Throwable e) { + System.err.println("Cannot process " + className + ": " + e.getMessage()); + } + + @Override + public void visitTypeInsn(int opcode, String type) { + super.visitTypeInsn(opcode, type); + if (opcode == NEW) { + if (type.endsWith("Permission")) 
{ + var objectType = Type.getObjectType(type); + if (KNOWN_PERMISSIONS.contains(objectType.getClassName())) { + permissionType = type; + } else { + try { + var clazz = Class.forName(objectType.getClassName()); + if (Permission.class.isAssignableFrom(clazz)) { + permissionType = type; + } + } catch (ClassNotFoundException e) { + handleException(objectType.getClassName(), e); + } + } + } + } + } + + @Override + @SuppressForbidden(reason = "We need to violate java's access system to access private parts") + public void visitFieldInsn(int opcode, String owner, String name, String descriptor) { + super.visitFieldInsn(opcode, owner, name, descriptor); + if (opcode == GETSTATIC && descriptor.endsWith("Permission;")) { + var permissionType = Type.getType(descriptor); + if (permissionType.getSort() == Type.ARRAY) { + permissionType = permissionType.getElementType(); + } + try { + var clazz = Class.forName(permissionType.getClassName()); + if (Permission.class.isAssignableFrom(clazz)) { + this.permissionType = permissionType.getInternalName(); + } + } catch (ClassNotFoundException e) { + handleException(permissionType.getClassName(), e); + } + + var objectType = Type.getObjectType(owner); + try { + var clazz = Class.forName(objectType.getClassName()); + Arrays.stream(clazz.getDeclaredFields()) + .filter(f -> Modifier.isStatic(f.getModifiers()) && Modifier.isFinal(f.getModifiers())) + .filter(f -> f.getName().equals(name)) + .findFirst() + .ifPresent(x -> { + if (Permission.class.isAssignableFrom(x.getType())) { + try { + x.setAccessible(true); + var p = (Permission) (x.get(null)); + this.runtimePermissionType = p.getName(); + } catch (IllegalAccessException | InaccessibleObjectException e) { + handleException(x.getName(), e); + } + } + }); + + } catch (ClassNotFoundException | NoClassDefFoundError | UnsatisfiedLinkError e) { + handleException(objectType.getClassName(), e); + } + } + } + + @Override + public void visitLdcInsn(Object value) { + super.visitLdcInsn(value); + if 
(permissionType != null && permissionType.equals("java/lang/RuntimePermission")) { + this.runtimePermissionType = value.toString(); + } + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) { + super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); + if (opcode == INVOKEVIRTUAL + || opcode == INVOKESPECIAL + || opcode == INVOKESTATIC + || opcode == INVOKEINTERFACE + || opcode == INVOKEDYNAMIC) { + + if (SECURITY_MANAGER_INTERNAL_NAME.equals(owner)) { + EnumSet externalAccesses = EnumSet.noneOf(ExternalAccess.class); + if (moduleExports.contains(getPackageName(className))) { + if ((methodAccess & ACC_PUBLIC) != 0) { + externalAccesses.add(ExternalAccess.METHOD); + } + if ((classAccess & ACC_PUBLIC) != 0) { + externalAccesses.add(ExternalAccess.CLASS); + } + } + + if (name.equals("checkPermission")) { + var callers = callerInfoByMethod.computeIfAbsent(name, ignored -> new ArrayList<>()); + callers.add( + new CallerInfo( + moduleName, + Path.of(sourcePath, source).toString(), + line, + className, + methodName, + methodDescriptor, + externalAccesses, + permissionType, + runtimePermissionType + ) + ); + this.permissionType = null; + this.runtimePermissionType = null; + } else if (name.startsWith("check")) { + // Non-generic methods (named methods that which already tell us the permission type) + var callers = callerInfoByMethod.computeIfAbsent(name, ignored -> new ArrayList<>()); + callers.add( + new CallerInfo( + moduleName, + Path.of(sourcePath, source).toString(), + line, + className, + methodName, + methodDescriptor, + externalAccesses, + null, + null + ) + ); + } + } + } + } + + private String getPackageName(String className) { + return ClassDesc.ofInternalName(className).packageName(); + } + + @Override + public void visitParameter(String name, int access) { + if (name != null) super.visitParameter(name, access); + } + + @Override + public void visitLineNumber(int line, Label 
start) { + super.visitLineNumber(line, start); + this.line = line; + } + + @Override + public void visitEnd() { + super.visitEnd(); + } + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java index 91193d5fa6eaf..de2cb9042610b 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java @@ -9,13 +9,36 @@ package org.elasticsearch.simdvec; +import org.apache.lucene.util.BitUtil; +import org.apache.lucene.util.Constants; import org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport; import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; + import static org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport.B_QUERY; public class ESVectorUtil { + private static final MethodHandle BIT_COUNT_MH; + static { + try { + // For xorBitCount we stride over the values as either 64-bits (long) or 32-bits (int) at a time. + // On ARM Long::bitCount is not vectorized, and therefore produces less than optimal code, when + // compared to Integer::bitCount. While Long::bitCount is optimal on x64. See + // https://bugs.openjdk.org/browse/JDK-8336000 + BIT_COUNT_MH = Constants.OS_ARCH.equals("aarch64") + ? 
MethodHandles.lookup() + .findStatic(ESVectorUtil.class, "andBitCountInt", MethodType.methodType(int.class, byte[].class, byte[].class)) + : MethodHandles.lookup() + .findStatic(ESVectorUtil.class, "andBitCountLong", MethodType.methodType(int.class, byte[].class, byte[].class)); + } catch (NoSuchMethodException | IllegalAccessException e) { + throw new AssertionError(e); + } + } + private static final ESVectorUtilSupport IMPL = ESVectorizationProvider.getInstance().getVectorUtilSupport(); public static long ipByteBinByte(byte[] q, byte[] d) { @@ -24,4 +47,103 @@ public static long ipByteBinByte(byte[] q, byte[] d) { } return IMPL.ipByteBinByte(q, d); } + + /** + * Compute the inner product of two vectors, where the query vector is a byte vector and the document vector is a bit vector. + * This will return the sum of the query vector values using the document vector as a mask. + * @param q the query vector + * @param d the document vector + * @return the inner product of the two vectors + */ + public static int ipByteBit(byte[] q, byte[] d) { + if (q.length != d.length * Byte.SIZE) { + throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + Byte.SIZE + " x " + d.length); + } + int result = 0; + // now combine the two vectors, summing the byte dimensions where the bit in d is `1` + for (int i = 0; i < d.length; i++) { + byte mask = d[i]; + for (int j = 0; j < Byte.SIZE; j++) { + if ((mask & (1 << j)) != 0) { + result += q[i * Byte.SIZE + j]; + } + } + } + return result; + } + + /** + * Compute the inner product of two vectors, where the query vector is a float vector and the document vector is a bit vector. + * This will return the sum of the query vector values using the document vector as a mask. 
+ * @param q the query vector + * @param d the document vector + * @return the inner product of the two vectors + */ + public static float ipFloatBit(float[] q, byte[] d) { + if (q.length != d.length * Byte.SIZE) { + throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + Byte.SIZE + " x " + d.length); + } + float result = 0; + for (int i = 0; i < d.length; i++) { + byte mask = d[i]; + for (int j = 0; j < Byte.SIZE; j++) { + if ((mask & (1 << j)) != 0) { + result += q[i * Byte.SIZE + j]; + } + } + } + return result; + } + + /** + * AND bit count computed over signed bytes. + * Copied from Lucene's XOR implementation + * @param a bytes containing a vector + * @param b bytes containing another vector, of the same dimension + * @return the value of the AND bit count of the two vectors + */ + public static int andBitCount(byte[] a, byte[] b) { + if (a.length != b.length) { + throw new IllegalArgumentException("vector dimensions differ: " + a.length + "!=" + b.length); + } + try { + return (int) BIT_COUNT_MH.invokeExact(a, b); + } catch (Throwable e) { + if (e instanceof Error err) { + throw err; + } else if (e instanceof RuntimeException re) { + throw re; + } else { + throw new RuntimeException(e); + } + } + } + + /** AND bit count striding over 4 bytes at a time. 
*/ + static int andBitCountInt(byte[] a, byte[] b) { + int distance = 0, i = 0; + // limit to number of int values in the array iterating by int byte views + for (final int upperBound = a.length & -Integer.BYTES; i < upperBound; i += Integer.BYTES) { + distance += Integer.bitCount((int) BitUtil.VH_NATIVE_INT.get(a, i) & (int) BitUtil.VH_NATIVE_INT.get(b, i)); + } + // tail: + for (; i < a.length; i++) { + distance += Integer.bitCount((a[i] & b[i]) & 0xFF); + } + return distance; + } + + /** AND bit count striding over 8 bytes at a time**/ + static int andBitCountLong(byte[] a, byte[] b) { + int distance = 0, i = 0; + // limit to number of long values in the array iterating by long byte views + for (final int upperBound = a.length & -Long.BYTES; i < upperBound; i += Long.BYTES) { + distance += Long.bitCount((long) BitUtil.VH_NATIVE_LONG.get(a, i) & (long) BitUtil.VH_NATIVE_LONG.get(b, i)); + } + // tail: + for (; i < a.length; i++) { + distance += Integer.bitCount((a[i] & b[i]) & 0xFF); + } + return distance; + } } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java index 0dbc41c0c1055..e9e0fd58f7638 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java @@ -21,6 +21,10 @@ public class ESVectorUtilTests extends BaseVectorizationTests { static final ESVectorizationProvider defaultedProvider = BaseVectorizationTests.defaultProvider(); static final ESVectorizationProvider defOrPanamaProvider = BaseVectorizationTests.maybePanamaProvider(); + public void testBitAndCount() { + testBasicBitAndImpl(ESVectorUtil::andBitCountLong); + } + public void testIpByteBinInvariants() { int iterations = atLeast(10); for (int i = 0; i < iterations; i++) { @@ -41,6 +45,23 @@ interface IpByteBin { long apply(byte[] q, byte[] d); } + interface BitOps { + long 
apply(byte[] q, byte[] d); + } + + void testBasicBitAndImpl(BitOps bitAnd) { + assertEquals(0, bitAnd.apply(new byte[] { 0 }, new byte[] { 0 })); + assertEquals(0, bitAnd.apply(new byte[] { 1 }, new byte[] { 0 })); + assertEquals(0, bitAnd.apply(new byte[] { 0 }, new byte[] { 1 })); + assertEquals(1, bitAnd.apply(new byte[] { 1 }, new byte[] { 1 })); + byte[] a = new byte[31]; + byte[] b = new byte[31]; + random().nextBytes(a); + random().nextBytes(b); + int expected = scalarBitAnd(a, b); + assertEquals(expected, bitAnd.apply(a, b)); + } + void testBasicIpByteBinImpl(IpByteBin ipByteBinFunc) { assertEquals(15L, ipByteBinFunc.apply(new byte[] { 1, 1, 1, 1 }, new byte[] { 1 })); assertEquals(30L, ipByteBinFunc.apply(new byte[] { 1, 2, 1, 2, 1, 2, 1, 2 }, new byte[] { 1, 2 })); @@ -115,6 +136,14 @@ static int scalarIpByteBin(byte[] q, byte[] d) { return res; } + static int scalarBitAnd(byte[] a, byte[] b) { + int res = 0; + for (int i = 0; i < a.length; i++) { + res += Integer.bitCount((a[i] & b[i]) & 0xFF); + } + return res; + } + public static int popcount(byte[] a, int aOffset, byte[] b, int length) { int res = 0; for (int j = 0; j < length; j++) { diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index f558ce8b9cfdb..a1ab6363166cb 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -49,4 +49,5 @@ dependencies { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("aggregations/date_agg_per_day_of_week/Date aggregartion per day of week", "week-date behaviour has changed") task.skipTest("aggregations/time_series/Configure with no synthetic source", "temporary until backport") + task.skipTest("aggregations/percentiles_hdr_metric/Negative values test", "returned exception has changed") }) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml index 6bf37425d9af4..8b67ba7056f37 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/percentiles_hdr_metric.yml @@ -446,4 +446,4 @@ setup: - match: { aggregations.percentiles_int.values.75\.0: 101.0615234375 } - match: { aggregations.percentiles_int.values.95\.0: 151.1240234375 } - match: { aggregations.percentiles_int.values.99\.0: 151.1240234375 } - - match: { _shards.failures.0.reason.type: array_index_out_of_bounds_exception } + - match: { _shards.failures.0.reason.type: illegal_argument_exception } diff --git a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidationPlugin.java b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidationPlugin.java index bca90147c869d..c462dbdcf6c40 100644 --- a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidationPlugin.java +++ b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidationPlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.MappedActionFilter; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; @@ -45,4 +46,9 @@ public Collection createComponents(PluginServices services) { public Collection getMappedActionFilters() { return actionFilters.get(); } + + @Override + public List> getSettings() { + return List.of(DotPrefixValidator.VALIDATE_DOT_PREFIXES, DotPrefixValidator.IGNORED_INDEX_PATTERNS_SETTING); + } } diff --git 
a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java index fc8d701b953f6..7f65bbb6937d9 100644 --- a/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java +++ b/modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java @@ -23,9 +23,12 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.tasks.Task; +import java.util.List; import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; /** * DotPrefixValidator provides an abstract class implementing a mapped action filter. @@ -39,7 +42,7 @@ * method, which subclasses must implement. * * Some built-in index names and patterns are also elided from the check, as defined in - * {@link #IGNORED_INDEX_NAMES} and {@link #IGNORED_INDEX_PATTERNS}. + * {@link #IGNORED_INDEX_NAMES} and {@link #IGNORED_INDEX_PATTERNS_SETTING}. 
*/ public abstract class DotPrefixValidator implements MappedActionFilter { public static final Setting VALIDATE_DOT_PREFIXES = Setting.boolSetting( @@ -64,20 +67,43 @@ public abstract class DotPrefixValidator implements MappedActionFil ".ml-state", ".ml-anomalies-unrelated" ); - private static Set IGNORED_INDEX_PATTERNS = Set.of( - Pattern.compile("\\.ml-state-\\d+"), - Pattern.compile("\\.slo-observability\\.sli-v\\d+.*"), - Pattern.compile("\\.slo-observability\\.summary-v\\d+.*") + public static Setting> IGNORED_INDEX_PATTERNS_SETTING = Setting.stringListSetting( + "cluster.indices.validate_ignored_dot_patterns", + List.of( + "\\.ml-state-\\d+", + "\\.slo-observability\\.sli-v\\d+.*", + "\\.slo-observability\\.summary-v\\d+.*", + "\\.entities\\.v\\d+\\.latest\\..*" + ), + (patternList) -> patternList.forEach(pattern -> { + try { + Pattern.compile(pattern); + } catch (PatternSyntaxException e) { + throw new IllegalArgumentException("invalid dot validation exception pattern: [" + pattern + "]", e); + } + }), + Setting.Property.NodeScope, + Setting.Property.Dynamic ); DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DotPrefixValidator.class); private final ThreadContext threadContext; private final boolean isEnabled; + private volatile Set ignoredIndexPatterns; public DotPrefixValidator(ThreadContext threadContext, ClusterService clusterService) { this.threadContext = threadContext; this.isEnabled = VALIDATE_DOT_PREFIXES.get(clusterService.getSettings()); + this.ignoredIndexPatterns = IGNORED_INDEX_PATTERNS_SETTING.get(clusterService.getSettings()) + .stream() + .map(Pattern::compile) + .collect(Collectors.toSet()); + clusterService.getClusterSettings().addSettingsUpdateConsumer(IGNORED_INDEX_PATTERNS_SETTING, this::updateIgnoredIndexPatterns); + } + + private void updateIgnoredIndexPatterns(List patterns) { + this.ignoredIndexPatterns = patterns.stream().map(Pattern::compile).collect(Collectors.toSet()); } protected abstract Set 
getIndicesFromRequest(RequestType request); @@ -108,7 +134,7 @@ void validateIndices(@Nullable Set indices) { if (IGNORED_INDEX_NAMES.contains(strippedName)) { return; } - if (IGNORED_INDEX_PATTERNS.stream().anyMatch(p -> p.matcher(strippedName).matches())) { + if (this.ignoredIndexPatterns.stream().anyMatch(p -> p.matcher(strippedName).matches())) { return; } deprecationLogger.warn( diff --git a/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java b/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java index 9adb33d51f510..6953f1cfc63df 100644 --- a/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java +++ b/modules/dot-prefix-validation/src/test/java/org/elasticsearch/validation/DotPrefixValidatorTests.java @@ -10,8 +10,8 @@ package org.elasticsearch.validation; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; @@ -19,7 +19,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.BeforeClass; -import java.util.HashSet; +import java.util.ArrayList; +import java.util.List; import java.util.Set; import static org.mockito.Mockito.mock; @@ -28,23 +29,24 @@ public class DotPrefixValidatorTests extends ESTestCase { private final OperatorValidator opV = new OperatorValidator<>(); private final NonOperatorValidator nonOpV = new NonOperatorValidator<>(); - private static final Set> settings; private static ClusterService clusterService; - private static ClusterSettings clusterSettings; - - static { - Set> cSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - 
cSettings.add(DotPrefixValidator.VALIDATE_DOT_PREFIXES); - settings = cSettings; - } @BeforeClass public static void beforeClass() { + List allowed = new ArrayList<>(DotPrefixValidator.IGNORED_INDEX_PATTERNS_SETTING.getDefault(Settings.EMPTY)); + // Add a new allowed pattern for testing + allowed.add("\\.potato\\d+"); + Settings settings = Settings.builder() + .put(DotPrefixValidator.IGNORED_INDEX_PATTERNS_SETTING.getKey(), Strings.collectionToCommaDelimitedString(allowed)) + .build(); clusterService = mock(ClusterService.class); - clusterSettings = new ClusterSettings(Settings.EMPTY, Sets.newHashSet(DotPrefixValidator.VALIDATE_DOT_PREFIXES)); + ClusterSettings clusterSettings = new ClusterSettings( + settings, + Sets.newHashSet(DotPrefixValidator.VALIDATE_DOT_PREFIXES, DotPrefixValidator.IGNORED_INDEX_PATTERNS_SETTING) + ); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); - when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.getSettings()).thenReturn(settings); when(clusterService.threadPool()).thenReturn(mock(ThreadPool.class)); } @@ -74,18 +76,25 @@ public void testValidation() { nonOpV.validateIndices(Set.of(".slo-observability.summary-v2.3")); nonOpV.validateIndices(Set.of(".slo-observability.summary-v2.3-2024-01-01")); nonOpV.validateIndices(Set.of("<.slo-observability.summary-v3.3.{2024-10-16||/M{yyyy-MM-dd|UTC}}>")); + nonOpV.validateIndices(Set.of(".entities.v1.latest.builtin_services_from_ecs_data")); + nonOpV.validateIndices(Set.of(".entities.v92.latest.eggplant.potato")); + nonOpV.validateIndices(Set.of("<.entities.v12.latest.eggplant-{M{yyyy-MM-dd|UTC}}>")); + + // Test pattern added to the settings + nonOpV.validateIndices(Set.of(".potato5")); + nonOpV.validateIndices(Set.of("<.potato5>")); } private void assertFails(Set indices) { nonOpV.validateIndices(indices); assertWarnings( "Index [" - + indices.stream().filter(i -> i.startsWith(".") || i.startsWith("<.")).toList().getFirst() + + 
indices.stream().filter(i -> i.startsWith(".") || i.startsWith("<.")).toList().get(0) + "] name begins with a dot (.), which is deprecated, and will not be allowed in a future Elasticsearch version." ); } - private class NonOperatorValidator extends DotPrefixValidator { + private static class NonOperatorValidator extends DotPrefixValidator { private NonOperatorValidator() { super(new ThreadContext(Settings.EMPTY), clusterService); @@ -107,7 +116,7 @@ boolean isInternalRequest() { } } - private class OperatorValidator extends NonOperatorValidator { + private static class OperatorValidator extends NonOperatorValidator { @Override boolean isInternalRequest() { return true; diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index f99f8dbe2fdd0..9e0392b2b7974 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -36,8 +36,6 @@ public final class GeoIpProcessor extends AbstractProcessor { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoIpProcessor.class); - static final String DEFAULT_DATABASES_DEPRECATION_MESSAGE = "the [fallback_to_default_databases] has been deprecated, because " - + "Elasticsearch no longer includes the default Maxmind geoip databases. This setting will be removed in Elasticsearch 9.0"; static final String UNSUPPORTED_DATABASE_DEPRECATION_MESSAGE = "the geoip processor will no longer support database type [{}] " + "in a future version of Elasticsearch"; // TODO add a message about migration? 
@@ -241,12 +239,6 @@ public Processor create( // validate (and consume) the download_database_on_pipeline_creation property even though the result is not used by the factory readBooleanProperty(type, processorTag, config, "download_database_on_pipeline_creation", true); - // noop, should be removed in 9.0 - Object value = config.remove("fallback_to_default_databases"); - if (value != null) { - deprecationLogger.warn(DeprecationCategory.OTHER, "default_databases_message", DEFAULT_DATABASES_DEPRECATION_MESSAGE); - } - final String databaseType; try (IpDatabase ipDatabase = ipDatabaseProvider.getDatabase(databaseFile)) { if (ipDatabase == null) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 5ac0c76054d33..34003b79fc18b 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -473,15 +473,6 @@ public void testLoadingCustomDatabase() throws IOException { threadPool.shutdown(); } - public void testFallbackUsingDefaultDatabases() throws Exception { - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(GEOIP_TYPE, databaseNodeService); - Map config = new HashMap<>(); - config.put("field", "source_field"); - config.put("fallback_to_default_databases", randomBoolean()); - factory.create(null, null, null, config); - assertWarnings(GeoIpProcessor.DEFAULT_DATABASES_DEPRECATION_MESSAGE); - } - public void testDownloadDatabaseOnPipelineCreation() throws IOException { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(GEOIP_TYPE, databaseNodeService); Map config = new HashMap<>(); diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 0e8195d4d48ce..b01c9201dcff1 100644 --- 
a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -50,10 +50,14 @@ tasks.named("dependencyLicenses").configure { mapping from: /asm-.*/, to: 'asm' } +tasks.named("yamlRestCompatTestTransform").configure({ task -> + task.skipTest("painless/146_dense_vector_bit_basic/Dot Product is not supported", "inner product is now supported") +}) + restResources { restApi { include '_common', 'cluster', 'nodes', 'indices', 'index', 'search', 'get', 'bulk', 'update', - 'scripts_painless_execute', 'put_script', 'delete_script' + 'scripts_painless_execute', 'put_script', 'delete_script', 'capabilities' } } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml index 4c195a0e32623..2ee38f849e9d4 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml @@ -101,9 +101,15 @@ setup: - match: {hits.hits.2._id: "3"} - close_to: {hits.hits.2._score: {value: 3.4641016, error: 0.01}} - --- "Dot Product is not supported": + - skip: + features: [capabilities] + capabilities: + - method: POST + path: /_search + capabilities: [ byte_float_bit_dot_product ] + reason: Capability required to run test - do: catch: bad_request headers: @@ -131,7 +137,6 @@ setup: source: "dotProduct(params.query_vector, 'vector')" params: query_vector: "006ff30e84" - --- "Cosine Similarity is not supported": - do: @@ -388,3 +393,119 @@ setup: - match: {hits.hits.2._id: "3"} - match: {hits.hits.2._score: 11.0} +--- +"Dot product with float": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ byte_float_bit_dot_product ] + test_runner_features: [capabilities, close_to] + reason: Capability required to run test + 
- do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: { match_all: { } } + script: + source: "dotProduct(params.query_vector, 'vector')" + params: + query_vector: [0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12, 3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21, 4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89, -4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67] + + - match: { hits.total: 3 } + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 35.999, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score:{value: 27.23, error: 0.01}} + + - match: {hits.hits.2._id: "1"} + - close_to: {hits.hits.2._score: {value: 16.57, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: { match_all: { } } + script: + source: "dotProduct(params.query_vector, 'indexed_vector')" + params: + query_vector: [0.23, 1.45, 3.67, 4.89, -0.56, 2.34, 3.21, 1.78, -2.45, 0.98, -0.12, 3.45, 4.56, 2.78, 1.23, 0.67, 3.89, 4.12, -2.34, 1.56, 0.78, 3.21, 4.12, 2.45, -1.67, 0.34, -3.45, 4.56, -2.78, 1.23, -0.67, 3.89, -4.34, 2.12, -1.56, 0.78, -3.21, 4.45, 2.12, 1.67] + + - match: { hits.total: 3 } + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 35.999, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score:{value: 27.23, error: 0.01}} + + - match: {hits.hits.2._id: "1"} + - close_to: {hits.hits.2._score: {value: 16.57, error: 0.01}} +--- +"Dot product with byte": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ byte_float_bit_dot_product ] + test_runner_features: capabilities + reason: Capability required to run test + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + 
script_score: + query: { match_all: { } } + script: + source: "dotProduct(params.query_vector, 'vector')" + params: + query_vector: [12, -34, 56, -78, 90, 12, 34, -56, 78, -90, 23, -45, 67, -89, 12, 34, 56, 78, 90, -12, 34, -56, 78, -90, 23, -45, 67, -89, 12, -34, 56, -78, 90, -12, 34, -56, 78, 90, 23, -45] + + - match: { hits.total: 3 } + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0._score: 248} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1._score: 136} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2._score: 20} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: { match_all: { } } + script: + source: "dotProduct(params.query_vector, 'indexed_vector')" + params: + query_vector: [12, -34, 56, -78, 90, 12, 34, -56, 78, -90, 23, -45, 67, -89, 12, 34, 56, 78, 90, -12, 34, -56, 78, -90, 23, -45, 67, -89, 12, -34, 56, -78, 90, -12, 34, -56, 78, 90, 23, -45] + + - match: { hits.total: 3 } + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0._score: 248} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1._score: 136} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2._score: 20} diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 9757d3af861a9..e13cc40dd3e0f 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -897,8 +897,13 @@ public void compareAndExchangeRegister( final var clientReference = blobStore.clientReference(); ActionListener.run(ActionListener.releaseAfter(listener.delegateResponse((delegate, e) -> { logger.trace(() -> Strings.format("[%s]: compareAndExchangeRegister failed", key), e); - if (e instanceof 
AmazonS3Exception amazonS3Exception && amazonS3Exception.getStatusCode() == 404) { - // an uncaught 404 means that our multipart upload was aborted by a concurrent operation before we could complete it + if (e instanceof AmazonS3Exception amazonS3Exception + && (amazonS3Exception.getStatusCode() == 404 + || amazonS3Exception.getStatusCode() == 0 && "NoSuchUpload".equals(amazonS3Exception.getErrorCode()))) { + // An uncaught 404 means that our multipart upload was aborted by a concurrent operation before we could complete it. + // Also (rarely) S3 can start processing the request during a concurrent abort and this can result in a 200 OK with an + // NoSuchUpload... in the response, which the SDK translates to status code 0. Either way, this means + // that our write encountered contention: delegate.onResponse(OptionalBytesReference.MISSING); } else { delegate.onFailure(e); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java index afe1bb1a03c76..8bf0c983ea74a 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonWebServiceRequest; +import com.amazonaws.retry.PredefinedRetryPolicies; import com.amazonaws.services.s3.model.AmazonS3Exception; import org.elasticsearch.cluster.metadata.RepositoryMetadata; @@ -19,6 +20,7 @@ import java.io.IOException; +import static org.hamcrest.CoreMatchers.equalTo; import static org.mockito.Mockito.mock; public class S3ServiceTests extends ESTestCase { @@ -47,19 +49,33 @@ public void testRetryOn403RetryPolicy() { e.setStatusCode(403); e.setErrorCode("InvalidAccessKeyId"); - // Retry on 403 invalid access key id + // AWS default retry condition does not retry on 403 + 
assertFalse(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION.shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9))); + + // The retryable 403 condition retries on 403 invalid access key id assertTrue( S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)) ); - // Not retry if not 403 or not invalid access key id if (randomBoolean()) { + // Random for another error status that is not 403 e.setStatusCode(randomValueOtherThan(403, () -> between(0, 600))); + // Retryable 403 condition delegates to the AWS default retry condition. Its result must be consistent with the decision + // by the AWS default, e.g. some error status like 429 is retryable by default, the retryable 403 condition respects it. + boolean actual = S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition() + .shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)); + boolean expected = PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION.shouldRetry( + mock(AmazonWebServiceRequest.class), + e, + between(0, 9) + ); + assertThat(actual, equalTo(expected)); } else { + // Not retry for 403 with error code that is not invalid access key id e.setErrorCode(randomAlphaOfLength(10)); + assertFalse( + S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)) + ); } - assertFalse( - S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9)) - ); } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java index 3bec37c0997db..8fdb7051e2be6 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java +++ 
b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java @@ -14,10 +14,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.network.ThreadWatchdog; -import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; import org.elasticsearch.transport.InboundPipeline; import org.elasticsearch.transport.Transports; @@ -52,9 +50,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception final ByteBuf buffer = (ByteBuf) msg; Netty4TcpChannel channel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get(); - final BytesReference wrapped = Netty4Utils.toBytesReference(buffer); activityTracker.startActivity(); - try (ReleasableBytesReference reference = new ReleasableBytesReference(wrapped, new ByteBufRefCounted(buffer))) { + try (ReleasableBytesReference reference = Netty4Utils.toReleasableBytesReference(buffer)) { pipeline.handleBytes(channel, reference); } finally { activityTracker.stopActivity(); @@ -81,35 +78,4 @@ public void channelInactive(ChannelHandlerContext ctx) throws Exception { super.channelInactive(ctx); } - private record ByteBufRefCounted(ByteBuf buffer) implements RefCounted { - - @Override - public void incRef() { - buffer.retain(); - } - - @Override - public boolean tryIncRef() { - if (hasReferences() == false) { - return false; - } - try { - buffer.retain(); - } catch (RuntimeException e) { - assert hasReferences() == false; - return false; - } - return true; - } - - @Override - public boolean decRef() { - return buffer.release(); - } - - @Override - public boolean hasReferences() { - return buffer.refCnt() > 0; - } - } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java 
b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index f57aa0e680fa1..459b6c77be8c3 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.http.HttpBody; import org.elasticsearch.transport.TransportException; @@ -130,8 +131,51 @@ public static BytesReference toBytesReference(final ByteBuf buffer) { } } + /** + * Wrap Netty's {@link ByteBuf} into {@link ReleasableBytesReference} and delegating reference count to ByteBuf. + */ public static ReleasableBytesReference toReleasableBytesReference(final ByteBuf buffer) { - return new ReleasableBytesReference(toBytesReference(buffer), buffer::release); + return new ReleasableBytesReference(toBytesReference(buffer), toRefCounted(buffer)); + } + + static ByteBufRefCounted toRefCounted(final ByteBuf buf) { + return new ByteBufRefCounted(buf); + } + + record ByteBufRefCounted(ByteBuf buffer) implements RefCounted { + + public int refCnt() { + return buffer.refCnt(); + } + + @Override + public void incRef() { + buffer.retain(); + } + + @Override + public boolean tryIncRef() { + if (hasReferences() == false) { + return false; + } + try { + buffer.retain(); + } catch (RuntimeException e) { + assert hasReferences() == false; + return false; + } + return true; + } + + @Override + public boolean decRef() { + return buffer.release(); + } + + @Override + public boolean hasReferences() { + return buffer.refCnt() > 0; + } } public static HttpBody.Full fullHttpBodyFrom(final ByteBuf buf) { diff --git 
a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java index 5676ef6dfc5ee..3844953628777 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java @@ -11,6 +11,7 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.Unpooled; import org.apache.lucene.util.BytesRef; @@ -68,6 +69,43 @@ public void testToChannelBuffer() throws IOException { assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(bytesReference)); } + /** + * Test that wrapped reference counted object from netty reflects correct counts in ES RefCounted + */ + public void testToRefCounted() { + var buf = PooledByteBufAllocator.DEFAULT.buffer(1); + assertEquals(1, buf.refCnt()); + + var refCounted = Netty4Utils.toRefCounted(buf); + assertEquals(1, refCounted.refCnt()); + + buf.retain(); + assertEquals(2, refCounted.refCnt()); + + refCounted.incRef(); + assertEquals(3, refCounted.refCnt()); + assertEquals(buf.refCnt(), refCounted.refCnt()); + + refCounted.decRef(); + assertEquals(2, refCounted.refCnt()); + assertEquals(buf.refCnt(), refCounted.refCnt()); + assertTrue(refCounted.hasReferences()); + + refCounted.decRef(); + refCounted.decRef(); + assertFalse(refCounted.hasReferences()); + } + + /** + * Ensures that released ByteBuf cannot be accessed from ReleasableBytesReference + */ + public void testToReleasableBytesReferenceThrowOnByteBufRelease() { + var buf = PooledByteBufAllocator.DEFAULT.buffer(1); + var relBytes = Netty4Utils.toReleasableBytesReference(buf); + buf.release(); + assertThrows(AssertionError.class, () -> relBytes.get(0)); + } + private BytesReference getRandomizedBytesReference(int length) 
throws IOException { // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays); diff --git a/muted-tests.yml b/muted-tests.yml index 3a471ad5d12d2..97d33b3b14b8f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -2,15 +2,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-async-query-api/line_17} issue: https://github.com/elastic/elasticsearch/issues/109260 -- class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" - issue: "https://github.com/elastic/elasticsearch/issues/101598" - method: "testFeatureImportanceValues" - class: "org.elasticsearch.client.RestClientSingleHostIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/102717" method: "testRequestResetAndAbort" -- class: "org.elasticsearch.xpack.deprecation.DeprecationHttpIT" - issue: "https://github.com/elastic/elasticsearch/issues/108628" - method: "testDeprecatedSettingsReturnWarnings" - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/110408" method: "testCreateAndRestorePartialSearchableSnapshot" @@ -29,9 +23,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/inference_processor/Test create processor with missing mandatory fields} issue: https://github.com/elastic/elasticsearch/issues/112191 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/rest-api/watcher/put-watch/line_120} - issue: https://github.com/elastic/elasticsearch/issues/99517 - class: org.elasticsearch.xpack.esql.action.ManyShardsIT method: testRejection issue: https://github.com/elastic/elasticsearch/issues/112406 @@ -85,15 +76,9 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test 
{p0=mtermvectors/10_basic/Tests catching other exceptions per item} issue: https://github.com/elastic/elasticsearch/issues/113325 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/ccr/apis/follow/post-resume-follow/line_84} - issue: https://github.com/elastic/elasticsearch/issues/113343 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated} issue: https://github.com/elastic/elasticsearch/pull/113286 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - method: test {categorize.Categorize} - issue: https://github.com/elastic/elasticsearch/issues/113428 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests method: testFieldMappings issue: https://github.com/elastic/elasticsearch/issues/113592 @@ -103,18 +88,9 @@ tests: - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 -- class: org.elasticsearch.smoketest.MlWithSecurityIT - method: test {yaml=ml/3rd_party_deployment/Test start and stop multiple deployments} - issue: https://github.com/elastic/elasticsearch/issues/101458 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - method: test {categorize.Categorize ASYNC} - issue: https://github.com/elastic/elasticsearch/issues/113721 - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113722 -- class: org.elasticsearch.threadpool.SimpleThreadPoolIT - method: testThreadPoolMetrics - issue: https://github.com/elastic/elasticsearch/issues/108320 - class: org.elasticsearch.kibana.KibanaThreadPoolIT method: testBlockedThreadPoolsRejectUserRequests issue: https://github.com/elastic/elasticsearch/issues/113939 @@ -130,21 +106,12 @@ tests: - class: 
org.elasticsearch.xpack.inference.InferenceCrudIT method: testGet issue: https://github.com/elastic/elasticsearch/issues/114135 -- class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT - method: testGetModel - issue: https://github.com/elastic/elasticsearch/issues/114657 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/usage/line_38} issue: https://github.com/elastic/elasticsearch/issues/113694 -- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT - method: testEveryActionIsEitherOperatorOnlyOrNonOperator - issue: https://github.com/elastic/elasticsearch/issues/102992 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: https://github.com/elastic/elasticsearch/issues/112731 -- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT - method: testInferDeploysDefaultElser - issue: https://github.com/elastic/elasticsearch/issues/114913 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry)} issue: https://github.com/elastic/elasticsearch/issues/115231 @@ -175,9 +142,6 @@ tests: - class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT method: testGeoShapeGeoHex issue: https://github.com/elastic/elasticsearch/issues/115705 -- class: org.elasticsearch.xpack.core.ml.calendars.ScheduledEventTests - method: testBuild_SucceedsWithDefaultSkipResultAndSkipModelUpdatesValues - issue: https://github.com/elastic/elasticsearch/issues/115476 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Verify start transform reuses destination index} issue: https://github.com/elastic/elasticsearch/issues/115808 @@ -187,9 +151,6 @@ tests: - class: org.elasticsearch.search.StressSearchServiceReaperIT method: testStressReaper issue: https://github.com/elastic/elasticsearch/issues/115816 -- 
class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests - method: testProcessFileChanges - issue: https://github.com/elastic/elasticsearch/issues/115280 - class: org.elasticsearch.search.SearchServiceTests method: testWaitOnRefreshTimeout issue: https://github.com/elastic/elasticsearch/issues/115935 @@ -199,12 +160,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/inference_crud/Test delete given model referenced by pipeline} issue: https://github.com/elastic/elasticsearch/issues/115970 -- class: org.elasticsearch.monitor.jvm.JvmStatsTests - method: testJvmStats - issue: https://github.com/elastic/elasticsearch/issues/115711 -- class: org.elasticsearch.repositories.s3.S3ServiceTests - method: testRetryOn403RetryPolicy - issue: https://github.com/elastic/elasticsearch/issues/115986 - class: org.elasticsearch.search.slice.SearchSliceIT method: testPointInTime issue: https://github.com/elastic/elasticsearch/issues/115988 @@ -226,14 +181,8 @@ tests: - class: org.elasticsearch.search.functionscore.QueryRescorerIT method: testScoring issue: https://github.com/elastic/elasticsearch/issues/116050 -- class: org.elasticsearch.indexing.IndexActionIT - method: testAutoGenerateIdNoDuplicates - issue: https://github.com/elastic/elasticsearch/issues/115716 - class: org.elasticsearch.xpack.application.connector.ConnectorIndexServiceTests issue: https://github.com/elastic/elasticsearch/issues/116087 -- class: org.elasticsearch.compute.operator.FilterOperatorTests - method: testSimpleDescription - issue: https://github.com/elastic/elasticsearch/issues/116094 - class: org.elasticsearch.xpack.searchbusinessrules.PinnedQueryBuilderIT method: testPinnedPromotions issue: https://github.com/elastic/elasticsearch/issues/116097 @@ -258,9 +207,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/inference_crud/Test delete given model with alias referenced by pipeline} issue: 
https://github.com/elastic/elasticsearch/issues/116133 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} - issue: https://github.com/elastic/elasticsearch/issues/114412 - class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT method: test {categorize.Categorize SYNC} issue: https://github.com/elastic/elasticsearch/issues/113054 @@ -285,6 +231,63 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=terms_enum/10_basic/Test security} issue: https://github.com/elastic/elasticsearch/issues/116178 +- class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT + method: testSearchWithRandomDisconnects + issue: https://github.com/elastic/elasticsearch/issues/116175 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/start_stop_datafeed/Test start datafeed given index pattern with no matching indices} + issue: https://github.com/elastic/elasticsearch/issues/116220 +- class: org.elasticsearch.search.basic.SearchWhileRelocatingIT + method: testSearchAndRelocateConcurrentlyRandomReplicas + issue: https://github.com/elastic/elasticsearch/issues/116145 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/filter_crud/Test update filter} + issue: https://github.com/elastic/elasticsearch/issues/116271 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/get_datafeeds/Test explicit get all datafeeds} + issue: https://github.com/elastic/elasticsearch/issues/116284 +- class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT + method: testDeprecatedSettingsReturnWarnings + issue: https://github.com/elastic/elasticsearch/issues/108628 +- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT + method: testEveryActionIsEitherOperatorOnlyOrNonOperator + issue: https://github.com/elastic/elasticsearch/issues/102992 +- class: 
org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests + method: testBottomFieldSort + issue: https://github.com/elastic/elasticsearch/issues/116249 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} + issue: https://github.com/elastic/elasticsearch/issues/116332 +- class: org.elasticsearch.xpack.shutdown.NodeShutdownIT + method: testAllocationPreventedForRemoval + issue: https://github.com/elastic/elasticsearch/issues/116363 +- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT + method: test {categorize.Categorize ASYNC} + issue: https://github.com/elastic/elasticsearch/issues/116373 +- class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsIntegTests + method: testCreateAndRestoreSearchableSnapshot + issue: https://github.com/elastic/elasticsearch/issues/116377 +- class: org.elasticsearch.threadpool.SimpleThreadPoolIT + method: testThreadPoolMetrics + issue: https://github.com/elastic/elasticsearch/issues/108320 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/jobs_crud/Test put job deprecated bucket span} + issue: https://github.com/elastic/elasticsearch/issues/116419 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/explain_data_frame_analytics/Test both job id and body} + issue: https://github.com/elastic/elasticsearch/issues/116433 +- class: org.elasticsearch.smoketest.MlWithSecurityIT + method: test {yaml=ml/inference_crud/Test force delete given model with alias referenced by pipeline} + issue: https://github.com/elastic/elasticsearch/issues/116443 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} + issue: https://github.com/elastic/elasticsearch/issues/116448 +- class: org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT + 
method: testILMDownsampleRollingRestart + issue: https://github.com/elastic/elasticsearch/issues/114233 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=ml/data_frame_analytics_crud/Test put config with unknown field in outlier detection analysis} + issue: https://github.com/elastic/elasticsearch/issues/116458 # Examples: # diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index 23d7af7603d56..f3fd57f3fc8ae 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -61,6 +61,9 @@ excludeList.add('cluster.desired_nodes/20_dry_run/Test validation works for dry // Excluded because they create dot-prefixed indices on older versions excludeList.add('indices.resolve_index/20_resolve_system_index/*') +// Excluded because the error has changed +excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') + BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 3532e08e8f659..8e1df37804708 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -57,11 +57,6 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") - task.skipTest("indices.create/21_synthetic_source_stored/index param - nested array within array - disabled second 
pass", "temporary until backported") - task.skipTest("indices.create/21_synthetic_source_stored/index param - root arrays", "temporary until backported") - task.skipTest("indices.create/21_synthetic_source_stored/object param - nested object with stored array", "temporary until backported") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json index 08dc7128234b9..0a2f7b33498cf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_data_lifecycle.json @@ -7,7 +7,8 @@ "stability":"stable", "visibility":"public", "headers":{ - "accept": [ "application/json"] + "accept": [ "application/json"], + "content_type": ["application/json"] }, "url": { "paths": [ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/200_dense_vector_docvalue_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/200_dense_vector_docvalue_fields.yml new file mode 100644 index 0000000000000..161fc23a84651 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/200_dense_vector_docvalue_fields.yml @@ -0,0 +1,163 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ dense_vector_docvalue_fields ] + test_runner_features: [ capabilities, close_to ] + reason: Capability required to run test + - do: + indices.create: + index: test + body: + mappings: + properties: + name: + type: keyword + vector1: + type: dense_vector + element_type: float + dims: 5 + index: true + vector2: + type: dense_vector + element_type: float + dims: 5 + index: false + 
vector3: + type: dense_vector + element_type: byte + dims: 5 + index: true + vector4: + type: dense_vector + element_type: byte + dims: 5 + index: false + vector5: + type: dense_vector + element_type: bit + dims: 40 + index: true + vector6: + type: dense_vector + element_type: bit + dims: 40 + index: false + - do: + index: + index: test + id: "1" + body: + name: cow.jpg + vector1: [230.0, 300.33, -34.8988, 15.555, -200.0] + vector2: [130.0, 115.0, -1.02, 15.555, -100.0] + vector3: [-1, 100, -13, 15, -128] + vector4: [-1, 50, -1, 1, 120] + vector5: [1, 111, -13, 15, -128] + vector6: [-1, 11, 0, 12, 111] + - do: + index: + index: test + id: "2" + body: + name: moose.jpg + vector1: [-0.5, 100.0, -13, 14.8, -156.0] + vector4: [-1, 50, -1, 1, 120] + vector5: [1, 111, -13, 15, -128] + vector6: null + - do: + index: + index: test + id: "3" + body: + name: rabbit.jpg + vector2: [130.0, 115.0, -1.02, 15.555, -100.0] + vector3: [-1, 100, -13, 15, -128] + + - do: + indices.refresh: {} + +--- +"Enable docvalue_fields parameter for dense_vector fields": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ dense_vector_docvalue_fields ] + test_runner_features: capabilities + reason: "Support for dense vector doc value fields capability required" + - do: + search: + _source: false + index: test + body: + docvalue_fields: [name, vector1, vector2, vector3, vector4, vector5, vector6] + sort: name + + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0.fields.name.0: "cow.jpg"} + + - length: {hits.hits.0.fields.vector1.0: 5} + - length: {hits.hits.0.fields.vector2.0: 5} + - length: {hits.hits.0.fields.vector3.0: 5} + - length: {hits.hits.0.fields.vector4.0: 5} + - length: {hits.hits.0.fields.vector5.0: 5} + - length: {hits.hits.0.fields.vector6.0: 5} + + - close_to: { hits.hits.0.fields.vector1.0.0: { value: 230.0, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector1.0.1: { value: 300.33, error: 0.001 } } + - close_to: { 
hits.hits.0.fields.vector1.0.2: { value: -34.8988, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector1.0.3: { value: 15.555, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector1.0.4: { value: -200.0, error: 0.001 } } + + - close_to: { hits.hits.0.fields.vector2.0.0: { value: 130.0, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector2.0.1: { value: 115.0, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector2.0.2: { value: -1.02, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector2.0.3: { value: 15.555, error: 0.001 } } + - close_to: { hits.hits.0.fields.vector2.0.4: { value: -100.0, error: 0.001 } } + + - match: {hits.hits.0.fields.vector3.0: [-1, 100, -13, 15, -128]} + - match: {hits.hits.0.fields.vector4.0: [-1, 50, -1, 1, 120]} + - match: {hits.hits.0.fields.vector5.0: [1, 111, -13, 15, -128]} + - match: {hits.hits.0.fields.vector6.0: [-1, 11, 0, 12, 111]} + + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + + - length: {hits.hits.1.fields.vector1.0: 5} + - length: {hits.hits.1.fields.vector4.0: 5} + - length: {hits.hits.1.fields.vector5.0: 5} + - match: {hits.hits.1.fields.vector2: null} + - match: {hits.hits.1.fields.vector3: null} + - match: {hits.hits.1.fields.vector6: null} + + - close_to: { hits.hits.1.fields.vector1.0.0: { value: -0.5, error: 0.001 } } + - close_to: { hits.hits.1.fields.vector1.0.1: { value: 100.0, error: 0.001 } } + - close_to: { hits.hits.1.fields.vector1.0.2: { value: -13, error: 0.001 } } + - close_to: { hits.hits.1.fields.vector1.0.3: { value: 14.8, error: 0.001 } } + - close_to: { hits.hits.1.fields.vector1.0.4: { value: -156.0, error: 0.001 } } + + - match: {hits.hits.1.fields.vector4.0: [-1, 50, -1, 1, 120]} + - match: {hits.hits.1.fields.vector5.0: [1, 111, -13, 15, -128]} + + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} + + - length: {hits.hits.2.fields.vector2.0: 5} + - length: {hits.hits.2.fields.vector3.0: 5} + - 
match: {hits.hits.2.fields.vector1: null} + - match: {hits.hits.2.fields.vector4: null} + - match: {hits.hits.2.fields.vector5: null} + - match: {hits.hits.2.fields.vector6: null} + + - close_to: { hits.hits.2.fields.vector2.0.0: { value: 130.0, error: 0.001 } } + - close_to: { hits.hits.2.fields.vector2.0.1: { value: 115.0, error: 0.001 } } + - close_to: { hits.hits.2.fields.vector2.0.2: { value: -1.02, error: 0.001 } } + - close_to: { hits.hits.2.fields.vector2.0.3: { value: 15.555, error: 0.001 } } + - close_to: { hits.hits.2.fields.vector2.0.4: { value: -100.0, error: 0.001 } } + + - match: {hits.hits.2.fields.vector3.0: [-1, 100, -13, 15, -128]} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index 1ceb5b43b8129..ac01f2dc0178a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -38,7 +38,6 @@ settings: index: number_of_shards: 1 - number_of_replicas: 0 analysis: filter: my_synonym_filter: @@ -72,7 +71,6 @@ settings: index: number_of_shards: 1 - number_of_replicas: 0 analysis: filter: my_synonym_filter: @@ -99,7 +97,6 @@ - '{"index": {"_index": "my_index2", "_id": "2"}}' - '{"my_field": "goodbye"}' - # An update of synonyms_set1 must trigger auto-reloading of analyzers only for synonyms_set1 - do: synonyms.put_synonym: @@ -109,8 +106,9 @@ - synonyms: "hello, salute" - synonyms: "ciao => goodbye" - match: { result: "updated" } - - match: { reload_analyzers_details._shards.total: 2 } # shard requests are still sent to 2 indices - - match: { reload_analyzers_details._shards.successful: 2 } + - gt: { reload_analyzers_details._shards.total: 0 } + - gt: { reload_analyzers_details._shards.successful: 0 } + - 
match: { reload_analyzers_details._shards.failed: 0 } - length: { reload_analyzers_details.reload_details: 1 } # reload details contain only a single index - match: { reload_analyzers_details.reload_details.0.index: "my_index1" } - match: { reload_analyzers_details.reload_details.0.reloaded_analyzers.0: "my_analyzer1" } diff --git a/server/build.gradle b/server/build.gradle index e8493751cb327..ef64b0746dfc4 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -39,6 +39,7 @@ dependencies { api project(':libs:grok') api project(":libs:tdigest") implementation project(":libs:simdvec") + implementation project(":libs:entitlement") // lucene api "org.apache.lucene:lucene-core:${versions.lucene}" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 9b9b23e71abed..f7bf775bc4f8b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -63,6 +63,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; public class DynamicMappingIT extends ESIntegTestCase { @@ -190,6 +192,35 @@ private Map indexConcurrently(int numberOfFieldsToCreate, Settin return properties; } + public void testConcurrentDynamicMappingsWithConflictingType() throws Throwable { + int numberOfDocsToCreate = 16; + indicesAdmin().prepareCreate("index").setSettings(Settings.builder()).get(); + ensureGreen("index"); + final AtomicReference error = new AtomicReference<>(); + startInParallel(numberOfDocsToCreate, i -> { + try { + assertEquals( + DocWriteResponse.Result.CREATED, + prepareIndex("index").setId(Integer.toString(i)).setSource("field" + i, 0, 
"field" + (i + 1), 0.1).get().getResult() + ); + } catch (Exception e) { + error.compareAndSet(null, e); + } + }); + if (error.get() != null) { + throw error.get(); + } + client().admin().indices().prepareRefresh("index").get(); + for (int i = 0; i < numberOfDocsToCreate; ++i) { + assertTrue(client().prepareGet("index", Integer.toString(i)).get().isExists()); + } + Map index = indicesAdmin().prepareGetMappings("index").get().getMappings().get("index").getSourceAsMap(); + for (int i = 0, j = 1; i < numberOfDocsToCreate; i++, j++) { + assertThat(new WriteField("properties.field" + i + ".type", () -> index).get(null), is(oneOf("long", "float"))); + assertThat(new WriteField("properties.field" + j + ".type", () -> index).get(null), is(oneOf("long", "float"))); + } + } + public void testPreflightCheckAvoidsMaster() throws InterruptedException, IOException { // can't use INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING nor INDEX_MAPPING_DEPTH_LIMIT_SETTING as a check here, as that is already // checked at parse time, see testTotalFieldsLimitForDynamicMappingsUpdateCheckedAtDocumentParseTime diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java index fb563ee333d07..a8028e8671450 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java @@ -11,9 +11,11 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.OnScriptError; import 
org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.indices.IndicesService; @@ -329,6 +331,10 @@ public void testIndicesMetrics() { equalTo(0L) ) ); + + verifyStatsPerIndexMode( + Map.of(IndexMode.STANDARD, numStandardDocs, IndexMode.LOGSDB, numLogsdbDocs, IndexMode.TIME_SERIES, numTimeSeriesDocs) + ); } void collectThenAssertMetrics(TestTelemetryPlugin telemetry, int times, Map> matchers) { @@ -434,6 +440,16 @@ int populateLogsdbIndices(long numIndices) { return totalDocs; } + private void verifyStatsPerIndexMode(Map expectedDocs) { + var nodes = clusterService().state().nodes().stream().toArray(DiscoveryNode[]::new); + var request = new IndexModeStatsActionType.StatsRequest(nodes); + var resp = client().execute(IndexModeStatsActionType.TYPE, request).actionGet(); + var stats = resp.stats(); + for (Map.Entry e : expectedDocs.entrySet()) { + assertThat(stats.get(e.getKey()).numDocs(), equalTo(e.getValue())); + } + } + private Map parseMapping(String mapping) throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, mapping)) { return parser.map(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index a754350c8faf7..6cca07bfacc9e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -14,6 +14,7 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteUtils; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import 
org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; @@ -21,6 +22,7 @@ import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; @@ -45,6 +47,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; @@ -319,6 +322,63 @@ public void testWithIndexAlias() { assertEquals(response1, response2); } + public void testNoIndices() { + boolean ignoreUnavailable = false; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("*").setIndicesOptions(options).get(); + assertIndices(response, "new_index"); + } + + public void testNoIndicesIgnoreUnavailable() { + boolean ignoreUnavailable = true; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("*").setIndicesOptions(options).get(); + assertIndices(response, "new_index"); + } + + public void testOneClosedIndex() { + boolean ignoreUnavailable = false; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, 
true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + IndexClosedException ex = expectThrows( + IndexClosedException.class, + client().prepareFieldCaps("old_index").setFields("*").setIndicesOptions(options) + ); + assertEquals("closed", ex.getMessage()); + } + + public void testOneClosedIndexIgnoreUnavailable() { + boolean ignoreUnavailable = true; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + FieldCapabilitiesResponse response = client().prepareFieldCaps("old_index").setFields("*").setIndicesOptions(options).get(); + assertIndices(response); + } + + public void testTwoIndicesOneClosed() { + boolean ignoreUnavailable = false; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + IndexClosedException ex = expectThrows( + IndexClosedException.class, + client().prepareFieldCaps("old_index", "new_index").setFields("*").setIndicesOptions(options) + ); + assertEquals("closed", ex.getMessage()); + } + + public void testTwoIndicesOneClosedIgnoreUnavailable() { + boolean ignoreUnavailable = true; + IndicesOptions options = IndicesOptions.fromOptions(ignoreUnavailable, true, true, false, true, true, false, false); + client().admin().indices().close(new CloseIndexRequest("old_index")).actionGet(); + FieldCapabilitiesResponse response = client().prepareFieldCaps("old_index", "new_index") + .setFields("*") + .setIndicesOptions(options) + .get(); + assertIndices(response, "new_index"); + } + public void testWithIndexFilter() throws InterruptedException { assertAcked(prepareCreate("index-1").setMapping("timestamp", "type=date", "field1", "type=keyword")); assertAcked(prepareCreate("index-2").setMapping("timestamp", 
"type=date", "field1", "type=long")); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 17b90f08bf051..3b3b06a1c6924 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -31,6 +31,7 @@ requires org.elasticsearch.grok; requires org.elasticsearch.tdigest; requires org.elasticsearch.simdvec; + requires org.elasticsearch.entitlement; requires hppc; requires HdrHistogram; @@ -470,5 +471,6 @@ org.elasticsearch.serverless.apifiltering; exports org.elasticsearch.lucene.spatial; exports org.elasticsearch.inference.configuration; + exports org.elasticsearch.monitor.metrics; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2acf80e426c82..4edeacfa754c5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -187,6 +187,8 @@ static TransportVersion def(int id) { public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_00_0); public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_00_0); public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_00_0); + public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_00_0); + public static final TransportVersion KQL_QUERY_ADDED = def(8_786_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 08558b48c08b3..9f727f49530a1 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -243,6 +243,7 @@ import org.elasticsearch.injection.guice.AbstractModule; import org.elasticsearch.injection.guice.TypeLiteral; import org.elasticsearch.injection.guice.multibindings.MapBinder; +import org.elasticsearch.monitor.metrics.IndexModeStatsActionType; import org.elasticsearch.persistent.CompletionPersistentTaskAction; import org.elasticsearch.persistent.RemovePersistentTaskAction; import org.elasticsearch.persistent.StartPersistentTaskAction; @@ -628,6 +629,7 @@ public void reg actions.register(TransportNodesFeaturesAction.TYPE, TransportNodesFeaturesAction.class); actions.register(RemoteClusterNodesAction.TYPE, RemoteClusterNodesAction.TransportAction.class); actions.register(TransportNodesStatsAction.TYPE, TransportNodesStatsAction.class); + actions.register(IndexModeStatsActionType.TYPE, IndexModeStatsActionType.TransportAction.class); actions.register(TransportNodesUsageAction.TYPE, TransportNodesUsageAction.class); actions.register(TransportNodesHotThreadsAction.TYPE, TransportNodesHotThreadsAction.class); actions.register(TransportListTasksAction.TYPE, TransportListTasksAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 823bff904283b..9bc088f944be0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -305,11 +305,14 @@ ClusterState execute( final CreateIndexClusterStateUpdateRequest updateRequest; if 
(isManagedSystemIndex) { - final SystemIndexDescriptor descriptor = mainDescriptor.getDescriptorCompatibleWith( - currentState.getMinSystemIndexMappingVersions().get(mainDescriptor.getPrimaryIndex()) - ); + final var requiredMinimumMappingVersion = currentState.getMinSystemIndexMappingVersions() + .get(mainDescriptor.getPrimaryIndex()); + final SystemIndexDescriptor descriptor = mainDescriptor.getDescriptorCompatibleWith(requiredMinimumMappingVersion); if (descriptor == null) { - final String message = mainDescriptor.getMinimumMappingsVersionMessage("auto-create index"); + final String message = mainDescriptor.getMinimumMappingsVersionMessage( + "auto-create index", + requiredMinimumMappingVersion + ); logger.warn(message); throw new IllegalStateException(message); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index b43f2006061a5..73f35e283f7d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -135,11 +135,10 @@ protected void masterOperation( // We check this via the request's origin. Eventually, `SystemIndexManager` will reconfigure // the index to the latest settings. 
if (isManagedSystemIndex && Strings.isNullOrEmpty(request.origin())) { - final SystemIndexDescriptor descriptor = mainDescriptor.getDescriptorCompatibleWith( - state.getMinSystemIndexMappingVersions().get(mainDescriptor.getPrimaryIndex()) - ); + final var requiredMinimumMappingVersion = state.getMinSystemIndexMappingVersions().get(mainDescriptor.getPrimaryIndex()); + final SystemIndexDescriptor descriptor = mainDescriptor.getDescriptorCompatibleWith(requiredMinimumMappingVersion); if (descriptor == null) { - final String message = mainDescriptor.getMinimumMappingsVersionMessage("create index"); + final String message = mainDescriptor.getMinimumMappingsVersionMessage("create index", requiredMinimumMappingVersion); logger.warn(message); listener.onFailure(new IllegalStateException(message)); return; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java index 8712430918fbf..9be1feae5ccfe 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestParser.java @@ -89,13 +89,13 @@ public BulkRequestParser(boolean deprecateOrErrorOnType, RestApiVersion restApiV .withRestApiVersion(restApiVersion); } - private static int findNextMarker(byte marker, int from, BytesReference data, boolean isIncremental) { + private static int findNextMarker(byte marker, int from, BytesReference data, boolean lastData) { final int res = data.indexOf(marker, from); if (res != -1) { assert res >= 0; return res; } - if (from != data.length() && isIncremental == false) { + if (from != data.length() && lastData) { throw new IllegalArgumentException("The bulk request must be terminated by a newline [\\n]"); } return res; @@ -140,13 +140,7 @@ public void parse( Consumer updateRequestConsumer, Consumer deleteRequestConsumer ) throws IOException { - // Bulk requests can contain a lot of repeated 
strings for the index, pipeline and routing parameters. This map is used to - // deduplicate duplicate strings parsed for these parameters. While it does not prevent instantiating the duplicate strings, it - // reduces their lifetime to the lifetime of this parse call instead of the lifetime of the full bulk request. - final Map stringDeduplicator = new HashMap<>(); - - incrementalParse( - data, + IncrementalParser incrementalParser = new IncrementalParser( defaultIndex, defaultRouting, defaultFetchSourceContext, @@ -158,53 +152,163 @@ public void parse( xContentType, indexRequestConsumer, updateRequestConsumer, - deleteRequestConsumer, - false, - stringDeduplicator + deleteRequestConsumer ); + + incrementalParser.parse(data, true); } - public int incrementalParse( - BytesReference data, - String defaultIndex, - String defaultRouting, - FetchSourceContext defaultFetchSourceContext, - String defaultPipeline, - Boolean defaultRequireAlias, - Boolean defaultRequireDataStream, - Boolean defaultListExecutedPipelines, + public IncrementalParser incrementalParser( + @Nullable String defaultIndex, + @Nullable String defaultRouting, + @Nullable FetchSourceContext defaultFetchSourceContext, + @Nullable String defaultPipeline, + @Nullable Boolean defaultRequireAlias, + @Nullable Boolean defaultRequireDataStream, + @Nullable Boolean defaultListExecutedPipelines, boolean allowExplicitIndex, XContentType xContentType, BiConsumer indexRequestConsumer, Consumer updateRequestConsumer, - Consumer deleteRequestConsumer, - boolean isIncremental, - Map stringDeduplicator - ) throws IOException { - XContent xContent = xContentType.xContent(); - byte marker = xContent.bulkSeparator(); - boolean typesDeprecationLogged = false; - - int line = 0; - int from = 0; - int consumed = 0; - - while (true) { - int nextMarker = findNextMarker(marker, from, data, isIncremental); - if (nextMarker == -1) { - break; + Consumer deleteRequestConsumer + ) { + return new IncrementalParser( + defaultIndex, + 
defaultRouting, + defaultFetchSourceContext, + defaultPipeline, + defaultRequireAlias, + defaultRequireDataStream, + defaultListExecutedPipelines, + allowExplicitIndex, + xContentType, + indexRequestConsumer, + updateRequestConsumer, + deleteRequestConsumer + ); + } + + public class IncrementalParser { + + // Bulk requests can contain a lot of repeated strings for the index, pipeline and routing parameters. This map is used to + // deduplicate duplicate strings parsed for these parameters. While it does not prevent instantiating the duplicate strings, it + // reduces their lifetime to the lifetime of this parse call instead of the lifetime of the full bulk request. + private final Map stringDeduplicator = new HashMap<>(); + + private final String defaultIndex; + private final String defaultRouting; + private final FetchSourceContext defaultFetchSourceContext; + private final String defaultPipeline; + private final Boolean defaultRequireAlias; + private final Boolean defaultRequireDataStream; + private final Boolean defaultListExecutedPipelines; + private final boolean allowExplicitIndex; + + private final XContentType xContentType; + private final byte marker; + private final BiConsumer indexRequestConsumer; + private final Consumer updateRequestConsumer; + private final Consumer deleteRequestConsumer; + + private Exception failure = null; + private int incrementalFromOffset = 0; + private int line = 0; + + private DocWriteRequest currentRequest = null; + private String currentType = null; + private String currentPipeline = null; + private boolean currentListExecutedPipelines = false; + private FetchSourceContext currentFetchSourceContext = null; + + private IncrementalParser( + @Nullable String defaultIndex, + @Nullable String defaultRouting, + @Nullable FetchSourceContext defaultFetchSourceContext, + @Nullable String defaultPipeline, + @Nullable Boolean defaultRequireAlias, + @Nullable Boolean defaultRequireDataStream, + @Nullable Boolean 
defaultListExecutedPipelines, + boolean allowExplicitIndex, + XContentType xContentType, + BiConsumer indexRequestConsumer, + Consumer updateRequestConsumer, + Consumer deleteRequestConsumer + ) { + this.defaultIndex = defaultIndex; + this.defaultRouting = defaultRouting; + this.defaultFetchSourceContext = defaultFetchSourceContext; + this.defaultPipeline = defaultPipeline; + this.defaultRequireAlias = defaultRequireAlias; + this.defaultRequireDataStream = defaultRequireDataStream; + this.defaultListExecutedPipelines = defaultListExecutedPipelines; + this.allowExplicitIndex = allowExplicitIndex; + this.xContentType = xContentType; + this.marker = xContentType.xContent().bulkSeparator(); + this.indexRequestConsumer = indexRequestConsumer; + this.updateRequestConsumer = updateRequestConsumer; + this.deleteRequestConsumer = deleteRequestConsumer; + } + + public int parse(BytesReference data, boolean lastData) throws IOException { + if (failure != null) { + assert false : failure.getMessage(); + throw new IllegalStateException("Parser has already encountered exception", failure); } - line++; + try { + return tryParse(data, lastData); + } catch (Exception e) { + failure = e; + throw e; + } + } + + private int tryParse(BytesReference data, boolean lastData) throws IOException { + int from = 0; + int consumed = 0; + + while (true) { + int nextMarker = findNextMarker(marker, incrementalFromOffset, data, lastData); + if (nextMarker == -1) { + incrementalFromOffset = data.length() - consumed; + break; + } + incrementalFromOffset = nextMarker + 1; + line++; + + if (currentRequest == null) { + if (parseActionLine(data, from, nextMarker)) { + if (currentRequest instanceof DeleteRequest deleteRequest) { + deleteRequestConsumer.accept(deleteRequest); + currentRequest = null; + } + } + } else { + parseAndConsumeDocumentLine(data, from, nextMarker); + currentRequest = null; + } - // now parse the action - try (XContentParser parser = createParser(xContent, data, from, nextMarker)) 
{ - // move pointers from = nextMarker + 1; + consumed = from; + } + + return lastData ? from : consumed; + } + + private boolean parseActionLine(BytesReference data, int from, int to) throws IOException { + assert currentRequest == null; + + // Reset the fields which are accessed during document line parsing + currentType = null; + currentPipeline = defaultPipeline; + currentListExecutedPipelines = defaultListExecutedPipelines != null && defaultListExecutedPipelines; + currentFetchSourceContext = defaultFetchSourceContext; + + try (XContentParser parser = createParser(xContentType.xContent(), data, from, to)) { // Move to START_OBJECT XContentParser.Token token = parser.nextToken(); if (token == null) { - continue; + return false; } if (token != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException( @@ -242,20 +346,16 @@ public int incrementalParse( } String index = defaultIndex; - String type = null; String id = null; String routing = defaultRouting; - FetchSourceContext fetchSourceContext = defaultFetchSourceContext; String opType = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; long ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; long ifPrimaryTerm = UNASSIGNED_PRIMARY_TERM; int retryOnConflict = 0; - String pipeline = defaultPipeline; boolean requireAlias = defaultRequireAlias != null && defaultRequireAlias; boolean requireDataStream = defaultRequireDataStream != null && defaultRequireDataStream; - boolean listExecutedPipelines = defaultListExecutedPipelines != null && defaultListExecutedPipelines; Map dynamicTemplates = Map.of(); // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id) @@ -279,7 +379,7 @@ public int incrementalParse( "Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]" ); } - type = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); + currentType = 
stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { id = parser.text(); } else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { @@ -297,15 +397,15 @@ public int incrementalParse( } else if (RETRY_ON_CONFLICT.match(currentFieldName, parser.getDeprecationHandler())) { retryOnConflict = parser.intValue(); } else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) { - pipeline = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); + currentPipeline = stringDeduplicator.computeIfAbsent(parser.text(), Function.identity()); } else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { - fetchSourceContext = FetchSourceContext.fromXContent(parser); + currentFetchSourceContext = FetchSourceContext.fromXContent(parser); } else if (REQUIRE_ALIAS.match(currentFieldName, parser.getDeprecationHandler())) { requireAlias = parser.booleanValue(); } else if (REQUIRE_DATA_STREAM.match(currentFieldName, parser.getDeprecationHandler())) { requireDataStream = parser.booleanValue(); } else if (LIST_EXECUTED_PIPELINES.match(currentFieldName, parser.getDeprecationHandler())) { - listExecutedPipelines = parser.booleanValue(); + currentListExecutedPipelines = parser.booleanValue(); } else { throw new IllegalArgumentException( "Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]" @@ -326,7 +426,7 @@ public int incrementalParse( dynamicTemplates = parser.mapStrings(); } else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName, parser.getDeprecationHandler())) { - fetchSourceContext = FetchSourceContext.fromXContent(parser); + currentFetchSourceContext = FetchSourceContext.fromXContent(parser); } else if (token != XContentParser.Token.VALUE_NULL) { throw new IllegalArgumentException( "Malformed action/metadata line [" @@ -360,22 +460,13 @@ public int 
incrementalParse( "Delete request in line [" + line + "] does not accept " + DYNAMIC_TEMPLATES.getPreferredName() ); } - deleteRequestConsumer.accept( - new DeleteRequest(index).id(id) - .routing(routing) - .version(version) - .versionType(versionType) - .setIfSeqNo(ifSeqNo) - .setIfPrimaryTerm(ifPrimaryTerm) - ); - consumed = from; + currentRequest = new DeleteRequest(index).id(id) + .routing(routing) + .version(version) + .versionType(versionType) + .setIfSeqNo(ifSeqNo) + .setIfPrimaryTerm(ifPrimaryTerm); } else { - nextMarker = findNextMarker(marker, from, data, isIncremental); - if (nextMarker == -1) { - break; - } - line++; - // we use internalAdd so we don't fork here, this allows us not to copy over the big byte array to small chunks // of index request. if ("index".equals(action) || "create".equals(action)) { @@ -383,20 +474,19 @@ public int incrementalParse( .routing(routing) .version(version) .versionType(versionType) - .setPipeline(pipeline) + .setPipeline(currentPipeline) .setIfSeqNo(ifSeqNo) .setIfPrimaryTerm(ifPrimaryTerm) - .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType) .setDynamicTemplates(dynamicTemplates) .setRequireAlias(requireAlias) .setRequireDataStream(requireDataStream) - .setListExecutedPipelines(listExecutedPipelines); + .setListExecutedPipelines(currentListExecutedPipelines); if ("create".equals(action)) { indexRequest = indexRequest.create(true); } else if (opType != null) { indexRequest = indexRequest.create("create".equals(opType)); } - indexRequestConsumer.accept(indexRequest, type); + currentRequest = indexRequest; } else if ("update".equals(action)) { if (version != Versions.MATCH_ANY || versionType != VersionType.INTERNAL) { throw new IllegalArgumentException( @@ -423,31 +513,38 @@ public int incrementalParse( .setIfPrimaryTerm(ifPrimaryTerm) .setRequireAlias(requireAlias) .routing(routing); - try ( - XContentParser sliceParser = createParser( - xContent, - sliceTrimmingCarriageReturn(data, 
from, nextMarker, xContentType) - ) - ) { - updateRequest.fromXContent(sliceParser); - } - if (fetchSourceContext != null) { - updateRequest.fetchSource(fetchSourceContext); - } - IndexRequest upsertRequest = updateRequest.upsertRequest(); - if (upsertRequest != null) { - upsertRequest.setPipeline(pipeline).setListExecutedPipelines(listExecutedPipelines); - } - - updateRequestConsumer.accept(updateRequest); + currentRequest = updateRequest; } - // move pointers - from = nextMarker + 1; - consumed = from; } } + return true; } - return isIncremental ? consumed : from; + + private void parseAndConsumeDocumentLine(BytesReference data, int from, int to) throws IOException { + assert currentRequest != null && currentRequest instanceof DeleteRequest == false; + if (currentRequest instanceof IndexRequest indexRequest) { + indexRequest.source(sliceTrimmingCarriageReturn(data, from, to, xContentType), xContentType); + indexRequestConsumer.accept(indexRequest, currentType); + } else if (currentRequest instanceof UpdateRequest updateRequest) { + try ( + XContentParser sliceParser = createParser( + xContentType.xContent(), + sliceTrimmingCarriageReturn(data, from, to, xContentType) + ) + ) { + updateRequest.fromXContent(sliceParser); + } + if (currentFetchSourceContext != null) { + updateRequest.fetchSource(currentFetchSourceContext); + } + IndexRequest upsertRequest = updateRequest.upsertRequest(); + if (upsertRequest != null) { + upsertRequest.setPipeline(currentPipeline).setListExecutedPipelines(currentListExecutedPipelines); + } + updateRequestConsumer.accept(updateRequest); + } + } + } @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) // Remove lenient parsing in V8 BWC mode diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index 98f7cf651ed09..80cdbf9d3d867 100644 --- 
a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -35,7 +35,7 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { public static final String NAME = "field_caps_request"; - public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpen(); + public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosed(); private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java index 27ef91dc116c3..ae5ad2bff26be 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java @@ -10,6 +10,7 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.index.query.QueryBuilder; @@ -38,6 +39,11 @@ public FieldCapabilitiesRequestBuilder setincludeEmptyFields(boolean includeEmpt return this; } + public FieldCapabilitiesRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { + request().indicesOptions(indicesOptions); + return this; + } + public FieldCapabilitiesRequestBuilder setIndexFilter(QueryBuilder indexFilter) { request().indexFilter(indexFilter); return this; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java 
b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 66434134fa69e..50dee7f85102e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -113,23 +114,28 @@ public TransportFieldCapabilitiesAction( @Override protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener listener) { - executeRequest(task, request, REMOTE_TYPE, listener); + executeRequest( + task, + request, + (remoteClient, remoteRequest, remoteListener) -> remoteClient.execute(REMOTE_TYPE, remoteRequest, remoteListener), + listener + ); } public void executeRequest( Task task, FieldCapabilitiesRequest request, - RemoteClusterActionType remoteAction, + RemoteRequestExecutor remoteRequestExecutor, ActionListener listener ) { // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can - searchCoordinationExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, remoteAction, l))); + searchCoordinationExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, remoteRequestExecutor, l))); } private void doExecuteForked( Task task, FieldCapabilitiesRequest request, - RemoteClusterActionType remoteAction, + RemoteRequestExecutor remoteRequestExecutor, ActionListener listener ) { if (ccsCheckCompatibility) { @@ -282,8 +288,8 @@ private void 
doExecuteForked( handleIndexFailure.accept(RemoteClusterAware.buildRemoteIndexName(clusterAlias, index), ex); } }); - remoteClusterClient.execute( - remoteAction, + remoteRequestExecutor.executeRemoteRequest( + remoteClusterClient, remoteRequest, // The underlying transport service may call onFailure with a thread pool other than search_coordinator. // This fork is a workaround to ensure that the merging of field-caps always occurs on the search_coordinator. @@ -298,6 +304,14 @@ private void doExecuteForked( } } + public interface RemoteRequestExecutor { + void executeRemoteRequest( + RemoteClusterClient remoteClient, + FieldCapabilitiesRequest remoteRequest, + ActionListener remoteListener + ); + } + private static void checkIndexBlocks(ClusterState clusterState, String[] concreteIndices) { var blocks = clusterState.blocks(); if (blocks.global().isEmpty() && blocks.indices().isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 6adcd80cb9b19..9cfc441490859 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.VersionType; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; @@ -156,6 +157,7 @@ static Parsed parse(Map config, boolean verbose, IngestService i return new Parsed(pipeline, ingestDocumentList, verbose); } + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) // Unconditionally deprecate the _type field once V8 BWC support is removed private static List parseDocs(Map config, RestApiVersion 
restApiVersion) { List> docs = ConfigurationUtils.readList(null, null, config, Fields.DOCS); if (docs.isEmpty()) { @@ -172,7 +174,7 @@ private static List parseDocs(Map config, RestAp String index = ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.INDEX.getFieldName(), "_index"); String id = ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.ID.getFieldName(), "_id"); String routing = ConfigurationUtils.readOptionalStringOrIntProperty(null, null, dataMap, Metadata.ROUTING.getFieldName()); - if (restApiVersion == RestApiVersion.V_7 && dataMap.containsKey(Metadata.TYPE.getFieldName())) { + if (restApiVersion != RestApiVersion.V_8 && dataMap.containsKey(Metadata.TYPE.getFieldName())) { deprecationLogger.compatibleCritical( "simulate_pipeline_with_types", "[types removal] specifying _type in pipeline simulation requests is deprecated" diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 317d117174e94..13c085c9875d4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -48,8 +48,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Executor; -import java.util.concurrent.LinkedTransferQueue; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -296,33 +296,23 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat } private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) { - try { - executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, 
shardIndex) { - @Override - public void innerOnResponse(Result result) { - try (releasable) { - onShardResult(result, shardIt); - } catch (Exception exc) { - onShardFailure(shardIndex, shard, shardIt, exc); - } + executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) { + @Override + public void innerOnResponse(Result result) { + try { + releasable.close(); + onShardResult(result, shardIt); + } catch (Exception exc) { + onShardFailure(shardIndex, shard, shardIt, exc); } + } - @Override - public void onFailure(Exception e) { - try (releasable) { - onShardFailure(shardIndex, shard, shardIt, e); - } - } - }); - } catch (final Exception e) { - /* - * It is possible to run into connection exceptions here because we are getting the connection early and might - * run into nodes that are not connected. In this case, on shard failure will move us to the next shard copy. - */ - try (releasable) { + @Override + public void onFailure(Exception e) { + releasable.close(); onShardFailure(shardIndex, shard, shardIt, e); } - } + }); } private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) { @@ -699,7 +689,7 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { * @see #onShardResult(SearchPhaseResult, SearchShardIterator) */ final void onPhaseDone() { // as a tribute to @kimchy aka. finishHim() - executeNextPhase(this, () -> getNextPhase(results, this)); + executeNextPhase(this, this::getNextPhase); } @Override @@ -756,15 +746,12 @@ protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator s /** * Returns the next phase based on the results of the initial search phase - * @param results the results of the initial search phase. 
Each non null element in the result array represent a successfully - * executed shard request - * @param context the search context for the next phase */ - protected abstract SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context); + protected abstract SearchPhase getNextPhase(); private static final class PendingExecutions { private final Semaphore semaphore; - private final LinkedTransferQueue> queue = new LinkedTransferQueue<>(); + private final ConcurrentLinkedQueue> queue = new ConcurrentLinkedQueue<>(); PendingExecutions(int permits) { assert permits > 0 : "not enough permits: " + permits; @@ -783,11 +770,10 @@ void submit(Consumer task) { } } } - } private void executeAndRelease(Consumer task) { - while (task != null) { + do { final SubscribableListener onDone = new SubscribableListener<>(); task.accept(() -> onDone.onResponse(null)); if (onDone.isDone()) { @@ -810,13 +796,21 @@ public void onFailure(Exception e) { }); return; } - } + } while (task != null); } private Consumer pollNextTaskOrReleasePermit() { var task = queue.poll(); if (task == null) { semaphore.release(); + while (queue.peek() != null && semaphore.tryAcquire()) { + task = queue.poll(); + if (task == null) { + semaphore.release(); + } else { + return task; + } + } } return task; } diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 0b587e72141ff..36d73c0db166a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -65,10 +65,6 @@ final class DfsQueryPhase extends SearchPhase { this.nextPhaseFactory = nextPhaseFactory; this.context = context; this.searchTransportService = context.getSearchTransport(); - - // register the release of the query consumer to free up the circuit breaker memory - // at the end of the search - 
context.addReleasable(queryResult); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java index 6c654d9235ec2..37d5065fdd031 100644 --- a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; @@ -65,9 +64,26 @@ public class QueryPhaseResultConsumer extends ArraySearchPhaseResults onPartialMergeFailure; + private final int batchReduceSize; + private final List buffer = new ArrayList<>(); + private final List emptyResults = new ArrayList<>(); + // the memory that is accounted in the circuit breaker for this consumer + private volatile long circuitBreakerBytes; + // the memory that is currently used in the buffer + private volatile long aggsCurrentBufferSize; + private volatile long maxAggsCurrentBufferSize = 0; + + private final ArrayDeque queue = new ArrayDeque<>(); + private final AtomicReference runningTask = new AtomicReference<>(); + private final AtomicReference failure = new AtomicReference<>(); + + private final TopDocsStats topDocsStats; + private volatile MergeResult mergeResult; + private volatile boolean hasPartialReduce; + private volatile int numReducePhases; + /** * Creates a {@link QueryPhaseResultConsumer} that incrementally reduces aggregation results * as shard results are consumed. 
@@ -99,13 +115,31 @@ public QueryPhaseResultConsumer( this.hasTopDocs = (source == null || size != 0) && queryPhaseRankCoordinatorContext == null; this.hasAggs = source != null && source.aggregations() != null; this.aggReduceContextBuilder = hasAggs ? controller.getReduceContext(isCanceled, source.aggregations()) : null; - int batchReduceSize = (hasAggs || hasTopDocs) ? Math.min(request.getBatchedReduceSize(), expectedResultSize) : expectedResultSize; - this.pendingMerges = new PendingMerges(batchReduceSize, request.resolveTrackTotalHitsUpTo()); + batchReduceSize = (hasAggs || hasTopDocs) ? Math.min(request.getBatchedReduceSize(), expectedResultSize) : expectedResultSize; + topDocsStats = new TopDocsStats(request.resolveTrackTotalHitsUpTo()); } @Override - protected void doClose() { - pendingMerges.close(); + protected synchronized void doClose() { + assert assertFailureAndBreakerConsistent(); + releaseBuffer(); + circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); + circuitBreakerBytes = 0; + + if (hasPendingMerges()) { + // This is a theoretically unreachable exception. 
+ throw new IllegalStateException("Attempted to close with partial reduce in-flight"); + } + } + + private boolean assertFailureAndBreakerConsistent() { + boolean hasFailure = failure.get() != null; + if (hasFailure) { + assert circuitBreakerBytes == 0; + } else { + assert circuitBreakerBytes >= 0; + } + return true; } @Override @@ -113,35 +147,35 @@ public void consumeResult(SearchPhaseResult result, Runnable next) { super.consumeResult(result, () -> {}); QuerySearchResult querySearchResult = result.queryResult(); progressListener.notifyQueryResult(querySearchResult.getShardIndex(), querySearchResult); - pendingMerges.consume(querySearchResult, next); + consume(querySearchResult, next); } @Override public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { - if (pendingMerges.hasPendingMerges()) { + if (hasPendingMerges()) { throw new AssertionError("partial reduce in-flight"); } - Exception failure = pendingMerges.failure.get(); - if (failure != null) { - throw failure; + Exception f = failure.get(); + if (f != null) { + throw f; } // ensure consistent ordering - pendingMerges.sortBuffer(); - final TopDocsStats topDocsStats = pendingMerges.topDocsStats; - final int resultSize = pendingMerges.buffer.size() + (pendingMerges.mergeResult == null ? 0 : 1); + sortBuffer(); + final TopDocsStats topDocsStats = this.topDocsStats; + final int resultSize = buffer.size() + (mergeResult == null ? 0 : 1); final List topDocsList = hasTopDocs ? new ArrayList<>(resultSize) : null; final List> aggsList = hasAggs ? 
new ArrayList<>(resultSize) : null; - synchronized (pendingMerges) { - if (pendingMerges.mergeResult != null) { + synchronized (this) { + if (mergeResult != null) { if (topDocsList != null) { - topDocsList.add(pendingMerges.mergeResult.reducedTopDocs); + topDocsList.add(mergeResult.reducedTopDocs); } if (aggsList != null) { - aggsList.add(DelayableWriteable.referencing(pendingMerges.mergeResult.reducedAggs)); + aggsList.add(DelayableWriteable.referencing(mergeResult.reducedAggs)); } } - for (QuerySearchResult result : pendingMerges.buffer) { + for (QuerySearchResult result : buffer) { topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly()); if (topDocsList != null) { TopDocsAndMaxScore topDocs = result.consumeTopDocs(); @@ -154,25 +188,25 @@ public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { } } SearchPhaseController.ReducedQueryPhase reducePhase; - long breakerSize = pendingMerges.circuitBreakerBytes; + long breakerSize = circuitBreakerBytes; try { if (aggsList != null) { // Add an estimate of the final reduce size - breakerSize = pendingMerges.addEstimateAndMaybeBreak(PendingMerges.estimateRamBytesUsedForReduce(breakerSize)); + breakerSize = addEstimateAndMaybeBreak(estimateRamBytesUsedForReduce(breakerSize)); } reducePhase = SearchPhaseController.reducedQueryPhase( results.asList(), aggsList, topDocsList == null ? 
Collections.emptyList() : topDocsList, topDocsStats, - pendingMerges.numReducePhases, + numReducePhases, false, aggReduceContextBuilder, queryPhaseRankCoordinatorContext, performFinalReduce ); } finally { - pendingMerges.releaseAggs(); + releaseAggs(); } if (hasAggs // reduced aggregations can be null if all shards failed @@ -180,8 +214,8 @@ public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { // Update the circuit breaker to replace the estimation with the serialized size of the newly reduced result long finalSize = DelayableWriteable.getSerializedSize(reducePhase.aggregations()) - breakerSize; - pendingMerges.addWithoutBreaking(finalSize); - logger.trace("aggs final reduction [{}] max [{}]", pendingMerges.aggsCurrentBufferSize, pendingMerges.maxAggsCurrentBufferSize); + addWithoutBreaking(finalSize); + logger.trace("aggs final reduction [{}] max [{}]", aggsCurrentBufferSize, maxAggsCurrentBufferSize); } if (progressListener != SearchProgressListener.NOOP) { progressListener.notifyFinalReduce( @@ -262,276 +296,226 @@ private MergeResult partialReduce( } public int getNumReducePhases() { - return pendingMerges.numReducePhases; + return numReducePhases; } - private class PendingMerges implements Releasable { - private final int batchReduceSize; - private final List buffer = new ArrayList<>(); - private final List emptyResults = new ArrayList<>(); - // the memory that is accounted in the circuit breaker for this consumer - private volatile long circuitBreakerBytes; - // the memory that is currently used in the buffer - private volatile long aggsCurrentBufferSize; - private volatile long maxAggsCurrentBufferSize = 0; - - private final ArrayDeque queue = new ArrayDeque<>(); - private final AtomicReference runningTask = new AtomicReference<>(); - private final AtomicReference failure = new AtomicReference<>(); - - private final TopDocsStats topDocsStats; - private volatile MergeResult mergeResult; - private volatile boolean hasPartialReduce; - 
private volatile int numReducePhases; - - PendingMerges(int batchReduceSize, int trackTotalHitsUpTo) { - this.batchReduceSize = batchReduceSize; - this.topDocsStats = new TopDocsStats(trackTotalHitsUpTo); - } - - @Override - public synchronized void close() { - assert assertFailureAndBreakerConsistent(); - - releaseBuffer(); - circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); - circuitBreakerBytes = 0; - - if (hasPendingMerges()) { - // This is a theoretically unreachable exception. - throw new IllegalStateException("Attempted to close with partial reduce in-flight"); - } - } - - private boolean assertFailureAndBreakerConsistent() { - boolean hasFailure = failure.get() != null; - if (hasFailure) { - assert circuitBreakerBytes == 0; - } else { - assert circuitBreakerBytes >= 0; - } - return true; - } - - boolean hasFailure() { - return failure.get() != null; - } + private boolean hasFailure() { + return failure.get() != null; + } - boolean hasPendingMerges() { - return queue.isEmpty() == false || runningTask.get() != null; - } + private boolean hasPendingMerges() { + return queue.isEmpty() == false || runningTask.get() != null; + } - void sortBuffer() { - if (buffer.size() > 0) { - buffer.sort(RESULT_COMPARATOR); - } + void sortBuffer() { + if (buffer.size() > 0) { + buffer.sort(RESULT_COMPARATOR); } + } - synchronized void addWithoutBreaking(long size) { - circuitBreaker.addWithoutBreaking(size); - circuitBreakerBytes += size; - maxAggsCurrentBufferSize = Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes); - } + private synchronized void addWithoutBreaking(long size) { + circuitBreaker.addWithoutBreaking(size); + circuitBreakerBytes += size; + maxAggsCurrentBufferSize = Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes); + } - synchronized long addEstimateAndMaybeBreak(long estimatedSize) { - circuitBreaker.addEstimateBytesAndMaybeBreak(estimatedSize, ""); - circuitBreakerBytes += estimatedSize; - maxAggsCurrentBufferSize = 
Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes); - return circuitBreakerBytes; - } + private synchronized long addEstimateAndMaybeBreak(long estimatedSize) { + circuitBreaker.addEstimateBytesAndMaybeBreak(estimatedSize, ""); + circuitBreakerBytes += estimatedSize; + maxAggsCurrentBufferSize = Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes); + return circuitBreakerBytes; + } - /** - * Returns the size of the serialized aggregation that is contained in the - * provided {@link QuerySearchResult}. - */ - long ramBytesUsedQueryResult(QuerySearchResult result) { - return hasAggs ? result.aggregations().getSerializedSize() : 0; - } + /** + * Returns the size of the serialized aggregation that is contained in the + * provided {@link QuerySearchResult}. + */ + private long ramBytesUsedQueryResult(QuerySearchResult result) { + return hasAggs ? result.aggregations().getSerializedSize() : 0; + } - /** - * Returns an estimation of the size that a reduce of the provided size - * would take on memory. - * This size is estimated as roughly 1.5 times the size of the serialized - * aggregations that need to be reduced. This estimation can be completely - * off for some aggregations but it is corrected with the real size after - * the reduce completes. - */ - static long estimateRamBytesUsedForReduce(long size) { - return Math.round(1.5d * size - size); - } + /** + * Returns an estimation of the size that a reduce of the provided size + * would take on memory. + * This size is estimated as roughly 1.5 times the size of the serialized + * aggregations that need to be reduced. This estimation can be completely + * off for some aggregations but it is corrected with the real size after + * the reduce completes. 
+ */ + private static long estimateRamBytesUsedForReduce(long size) { + return Math.round(1.5d * size - size); + } - public void consume(QuerySearchResult result, Runnable next) { - if (hasFailure()) { - result.consumeAll(); - next.run(); - } else if (result.isNull()) { - result.consumeAll(); - SearchShardTarget target = result.getSearchShardTarget(); - SearchShard searchShard = new SearchShard(target.getClusterAlias(), target.getShardId()); - synchronized (this) { - emptyResults.add(searchShard); - } - next.run(); - } else { - final long aggsSize = ramBytesUsedQueryResult(result); - boolean executeNextImmediately = true; - boolean hasFailure = false; - synchronized (this) { - if (hasFailure()) { - hasFailure = true; - } else { - if (hasAggs) { - try { - addEstimateAndMaybeBreak(aggsSize); - } catch (Exception exc) { - releaseBuffer(); - onMergeFailure(exc); - hasFailure = true; - } + private void consume(QuerySearchResult result, Runnable next) { + if (hasFailure()) { + result.consumeAll(); + next.run(); + } else if (result.isNull()) { + result.consumeAll(); + SearchShardTarget target = result.getSearchShardTarget(); + SearchShard searchShard = new SearchShard(target.getClusterAlias(), target.getShardId()); + synchronized (this) { + emptyResults.add(searchShard); + } + next.run(); + } else { + final long aggsSize = ramBytesUsedQueryResult(result); + boolean executeNextImmediately = true; + boolean hasFailure = false; + synchronized (this) { + if (hasFailure()) { + hasFailure = true; + } else { + if (hasAggs) { + try { + addEstimateAndMaybeBreak(aggsSize); + } catch (Exception exc) { + releaseBuffer(); + onMergeFailure(exc); + hasFailure = true; } - if (hasFailure == false) { - aggsCurrentBufferSize += aggsSize; - // add one if a partial merge is pending - int size = buffer.size() + (hasPartialReduce ? 
1 : 0); - if (size >= batchReduceSize) { - hasPartialReduce = true; - executeNextImmediately = false; - QuerySearchResult[] clone = buffer.toArray(QuerySearchResult[]::new); - MergeTask task = new MergeTask(clone, aggsCurrentBufferSize, new ArrayList<>(emptyResults), next); - aggsCurrentBufferSize = 0; - buffer.clear(); - emptyResults.clear(); - queue.add(task); - tryExecuteNext(); - } - buffer.add(result); + } + if (hasFailure == false) { + aggsCurrentBufferSize += aggsSize; + // add one if a partial merge is pending + int size = buffer.size() + (hasPartialReduce ? 1 : 0); + if (size >= batchReduceSize) { + hasPartialReduce = true; + executeNextImmediately = false; + QuerySearchResult[] clone = buffer.toArray(QuerySearchResult[]::new); + MergeTask task = new MergeTask(clone, aggsCurrentBufferSize, new ArrayList<>(emptyResults), next); + aggsCurrentBufferSize = 0; + buffer.clear(); + emptyResults.clear(); + queue.add(task); + tryExecuteNext(); } + buffer.add(result); } } - if (hasFailure) { - result.consumeAll(); - } - if (executeNextImmediately) { - next.run(); - } + } + if (hasFailure) { + result.consumeAll(); + } + if (executeNextImmediately) { + next.run(); } } + } - private void releaseBuffer() { - for (QuerySearchResult querySearchResult : buffer) { - querySearchResult.releaseAggs(); - } - buffer.clear(); + private void releaseBuffer() { + for (QuerySearchResult querySearchResult : buffer) { + querySearchResult.releaseAggs(); } + buffer.clear(); + } - private synchronized void onMergeFailure(Exception exc) { - if (failure.compareAndSet(null, exc) == false) { - assert circuitBreakerBytes == 0; - return; - } - assert circuitBreakerBytes >= 0; - if (circuitBreakerBytes > 0) { - // make sure that we reset the circuit breaker - circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); - circuitBreakerBytes = 0; - } - onPartialMergeFailure.accept(exc); - final MergeTask task = runningTask.getAndSet(null); - if (task != null) { - task.cancel(); - } - MergeTask 
mergeTask; - while ((mergeTask = queue.pollFirst()) != null) { - mergeTask.cancel(); - } - mergeResult = null; + private synchronized void onMergeFailure(Exception exc) { + if (failure.compareAndSet(null, exc) == false) { + assert circuitBreakerBytes == 0; + return; + } + assert circuitBreakerBytes >= 0; + if (circuitBreakerBytes > 0) { + // make sure that we reset the circuit breaker + circuitBreaker.addWithoutBreaking(-circuitBreakerBytes); + circuitBreakerBytes = 0; } + onPartialMergeFailure.accept(exc); + final MergeTask task = runningTask.getAndSet(null); + if (task != null) { + task.cancel(); + } + MergeTask mergeTask; + while ((mergeTask = queue.pollFirst()) != null) { + mergeTask.cancel(); + } + mergeResult = null; + } - private void tryExecuteNext() { - final MergeTask task; - synchronized (this) { - if (hasFailure() || runningTask.get() != null) { - return; - } - task = queue.poll(); - runningTask.set(task); - } - if (task == null) { - return; - } + private void tryExecuteNext() { + assert Thread.holdsLock(this); + final MergeTask task; + if (hasFailure() || runningTask.get() != null) { + return; + } + task = queue.poll(); + runningTask.set(task); + if (task == null) { + return; + } - executor.execute(new AbstractRunnable() { - @Override - protected void doRun() { - MergeTask mergeTask = task; - QuerySearchResult[] toConsume = mergeTask.consumeBuffer(); - while (mergeTask != null) { - final MergeResult thisMergeResult = mergeResult; - long estimatedTotalSize = (thisMergeResult != null ? 
thisMergeResult.estimatedSize : 0) + mergeTask.aggsBufferSize; - final MergeResult newMerge; - try { - long estimatedMergeSize = estimateRamBytesUsedForReduce(estimatedTotalSize); - addEstimateAndMaybeBreak(estimatedMergeSize); - estimatedTotalSize += estimatedMergeSize; - ++numReducePhases; - newMerge = partialReduce(toConsume, mergeTask.emptyResults, topDocsStats, thisMergeResult, numReducePhases); - } catch (Exception t) { - QueryPhaseResultConsumer.releaseAggs(toConsume); - onMergeFailure(t); + executor.execute(new AbstractRunnable() { + @Override + protected void doRun() { + MergeTask mergeTask = task; + QuerySearchResult[] toConsume = mergeTask.consumeBuffer(); + while (mergeTask != null) { + final MergeResult thisMergeResult = mergeResult; + long estimatedTotalSize = (thisMergeResult != null ? thisMergeResult.estimatedSize : 0) + mergeTask.aggsBufferSize; + final MergeResult newMerge; + try { + long estimatedMergeSize = estimateRamBytesUsedForReduce(estimatedTotalSize); + addEstimateAndMaybeBreak(estimatedMergeSize); + estimatedTotalSize += estimatedMergeSize; + ++numReducePhases; + newMerge = partialReduce(toConsume, mergeTask.emptyResults, topDocsStats, thisMergeResult, numReducePhases); + } catch (Exception t) { + QueryPhaseResultConsumer.releaseAggs(toConsume); + onMergeFailure(t); + return; + } + synchronized (QueryPhaseResultConsumer.this) { + if (hasFailure()) { return; } - synchronized (QueryPhaseResultConsumer.this) { - if (hasFailure()) { - return; - } - mergeResult = newMerge; - if (hasAggs) { - // Update the circuit breaker to remove the size of the source aggregations - // and replace the estimation with the serialized size of the newly reduced result. 
- long newSize = mergeResult.estimatedSize - estimatedTotalSize; - addWithoutBreaking(newSize); - if (logger.isTraceEnabled()) { - logger.trace( - "aggs partial reduction [{}->{}] max [{}]", - estimatedTotalSize, - mergeResult.estimatedSize, - maxAggsCurrentBufferSize - ); - } + mergeResult = newMerge; + if (hasAggs) { + // Update the circuit breaker to remove the size of the source aggregations + // and replace the estimation with the serialized size of the newly reduced result. + long newSize = mergeResult.estimatedSize - estimatedTotalSize; + addWithoutBreaking(newSize); + if (logger.isTraceEnabled()) { + logger.trace( + "aggs partial reduction [{}->{}] max [{}]", + estimatedTotalSize, + mergeResult.estimatedSize, + maxAggsCurrentBufferSize + ); } } - Runnable r = mergeTask.consumeListener(); - synchronized (QueryPhaseResultConsumer.this) { - while (true) { - mergeTask = queue.poll(); - runningTask.set(mergeTask); - if (mergeTask == null) { - break; - } - toConsume = mergeTask.consumeBuffer(); - if (toConsume != null) { - break; - } + } + Runnable r = mergeTask.consumeListener(); + synchronized (QueryPhaseResultConsumer.this) { + while (true) { + mergeTask = queue.poll(); + runningTask.set(mergeTask); + if (mergeTask == null) { + break; + } + toConsume = mergeTask.consumeBuffer(); + if (toConsume != null) { + break; } } - if (r != null) { - r.run(); - } + } + if (r != null) { + r.run(); } } + } - @Override - public void onFailure(Exception exc) { - onMergeFailure(exc); - } - }); - } + @Override + public void onFailure(Exception exc) { + onMergeFailure(exc); + } + }); + } - public synchronized void releaseAggs() { - if (hasAggs) { - for (QuerySearchResult result : buffer) { - result.releaseAggs(); - } + private synchronized void releaseAggs() { + if (hasAggs) { + for (QuerySearchResult result : buffer) { + result.releaseAggs(); } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 26eb266cd457e..69ca1569a7c07 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -98,7 +98,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(final SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { final List dfsSearchResults = results.getAtomicArray().asList(); final AggregatedDfs aggregatedDfs = SearchPhaseController.aggregateDfs(dfsSearchResults); final List mergedKnnResults = SearchPhaseController.mergeKnnResults(getRequest(), dfsSearchResults); @@ -107,8 +107,8 @@ protected SearchPhase getNextPhase(final SearchPhaseResults res aggregatedDfs, mergedKnnResults, queryPhaseResultConsumer, - (queryResults) -> SearchQueryThenFetchAsyncAction.nextPhase(client, context, queryResults, aggregatedDfs), - context + (queryResults) -> SearchQueryThenFetchAsyncAction.nextPhase(client, this, queryResults, aggregatedDfs), + this ); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 33b2cdf74cd79..e92b5bbf4b5e5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -147,7 +147,7 @@ static SearchPhase nextPhase( } @Override - protected SearchPhase getNextPhase(final SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return nextPhase(client, this, results, null); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java 
b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index c4a078d9d00ad..010f96f212116 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -277,7 +277,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase(getName()) { private void onExecuteFailure(Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 8f718972c2eaa..35f106ab58cbc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.RemoteClusterActionType; @@ -52,7 +53,6 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; @@ -160,7 +160,7 @@ public class TransportSearchAction extends HandledTransportAction listener) { - executeRequest( - (SearchTask) task, - searchRequest, - new SearchResponseActionListener((SearchTask) task, listener), - 
AsyncSearchActionProvider::new - ); + executeRequest((SearchTask) task, searchRequest, new SearchResponseActionListener(listener), AsyncSearchActionProvider::new); } void executeRequest( @@ -372,7 +368,7 @@ void executeRequest( searchPhaseProvider.apply(delegate) ); } else { - if (listener instanceof TelemetryListener tl) { + if (delegate instanceof TelemetryListener tl) { tl.setRemotes(resolvedIndices.getRemoteClusterIndices().size()); if (task.isAsync()) { tl.setFeature(CCSUsageTelemetry.ASYNC_FEATURE); @@ -398,7 +394,7 @@ void executeRequest( } final TaskId parentTaskId = task.taskInfo(clusterService.localNode().getId(), false).taskId(); if (shouldMinimizeRoundtrips(rewritten)) { - if (listener instanceof TelemetryListener tl) { + if (delegate instanceof TelemetryListener tl) { tl.setFeature(CCSUsageTelemetry.MRT_FEATURE); } final AggregationReduceContext.Builder aggregationReduceContextBuilder = rewritten.source() != null @@ -508,7 +504,7 @@ void executeRequest( // We set the keep alive to -1 to indicate that we don't need the pit id in the response. // This is needed since we delete the pit prior to sending the response so the id doesn't exist anymore. 
source.pointInTimeBuilder(new PointInTimeBuilder(resp.getPointInTimeId()).setKeepAlive(TimeValue.MINUS_ONE)); - var pitListener = new SearchResponseActionListener(task, listener) { + var pitListener = new SearchResponseActionListener(delegate) { @Override public void onResponse(SearchResponse response) { // we need to close the PIT first so we delay the release of the response to after the closing @@ -516,13 +512,13 @@ public void onResponse(SearchResponse response) { closePIT( client, original.source().pointInTimeBuilder(), - () -> ActionListener.respondAndRelease(listener, response) + () -> ActionListener.respondAndRelease(delegate, response) ); } @Override public void onFailure(Exception e) { - closePIT(client, original.source().pointInTimeBuilder(), () -> listener.onFailure(e)); + closePIT(client, original.source().pointInTimeBuilder(), () -> delegate.onFailure(e)); } }; executeRequest(task, original, pitListener, searchPhaseProvider); @@ -1874,14 +1870,13 @@ private interface TelemetryListener { void setClient(String client); } - private class SearchResponseActionListener implements ActionListener, TelemetryListener { - private final SearchTask task; - private final ActionListener listener; + private class SearchResponseActionListener extends DelegatingActionListener + implements + TelemetryListener { private final CCSUsage.Builder usageBuilder; - SearchResponseActionListener(SearchTask task, ActionListener listener) { - this.task = task; - this.listener = listener; + SearchResponseActionListener(ActionListener listener) { + super(listener); if (listener instanceof SearchResponseActionListener srListener) { usageBuilder = srListener.usageBuilder; } else { @@ -1893,7 +1888,7 @@ private class SearchResponseActionListener implements ActionListener 0; + return collectTelemetry && usageBuilder.getRemotesCount() > 0; } public void setRemotes(int count) { @@ -1942,7 +1937,7 @@ public void onResponse(SearchResponse searchResponse) { return; } // This is last because 
we want to collect telemetry before returning the response. - listener.onResponse(searchResponse); + delegate.onResponse(searchResponse); } @Override @@ -1952,7 +1947,7 @@ public void onFailure(Exception e) { usageBuilder.setFailure(e); recordTelemetry(); } - listener.onFailure(e); + super.onFailure(e); } private void recordTelemetry() { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 5eda399fee2ba..236baf89a04e9 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.jdk.JarHell; @@ -198,12 +199,16 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { VectorUtil.class ); - // install SM after natives, shutdown hooks, etc. - org.elasticsearch.bootstrap.Security.configure( - nodeEnv, - SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()), - args.pidFile() - ); + if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { + EntitlementBootstrap.bootstrap(); + } else { + // install SM after natives, shutdown hooks, etc. + org.elasticsearch.bootstrap.Security.configure( + nodeEnv, + SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()), + args.pidFile() + ); + } } private static void ensureInitialized(Class... 
classes) { diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index f7cad013554c6..1364f0c78f3be 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.ToXContent; @@ -1025,29 +1026,14 @@ public static ClusterState readFrom(StreamInput in, DiscoveryNode localNode) thr builder.metadata = Metadata.readFrom(in); builder.routingTable = RoutingTable.readFrom(in); builder.nodes = DiscoveryNodes.readFrom(in, localNode); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - builder.nodeIdsToCompatibilityVersions(in.readMap(CompatibilityVersions::readVersion)); - } else { - // this clusterstate is from a pre-8.8.0 node - // infer the versions from discoverynodes for now - // leave mappings versions empty - builder.nodes() - .getNodes() - .values() - .forEach(n -> builder.putCompatibilityVersions(n.getId(), inferTransportVersion(n), Map.of())); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - builder.nodeFeatures(ClusterFeatures.readFrom(in)); - } + builder.nodeIdsToCompatibilityVersions(in.readMap(CompatibilityVersions::readVersion)); + builder.nodeFeatures(ClusterFeatures.readFrom(in)); builder.blocks = ClusterBlocks.readFrom(in); int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { Custom customIndexMetadata = in.readNamedWriteable(Custom.class); builder.putCustom(customIndexMetadata.getWriteableName(), customIndexMetadata); } - if 
(in.getTransportVersion().before(TransportVersions.V_8_0_0)) { - in.readVInt(); // used to be minimumMasterNodesOnPublishingMaster, which was used in 7.x for BWC with 6.x - } return builder.build(); } @@ -1055,22 +1041,10 @@ public static ClusterState readFrom(StreamInput in, DiscoveryNode localNode) thr * If the cluster state does not contain transport version information, this is the version * that is inferred for all nodes on version 8.8.0 or above. */ + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) public static final TransportVersion INFERRED_TRANSPORT_VERSION = TransportVersions.V_8_8_0; - public static final Version VERSION_INTRODUCING_TRANSPORT_VERSIONS = Version.V_8_8_0; - private static TransportVersion inferTransportVersion(DiscoveryNode node) { - TransportVersion tv; - if (node.getVersion().before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { - // 1-to-1 mapping between Version and TransportVersion - tv = TransportVersion.fromId(node.getPre811VersionId().getAsInt()); - } else { - // use the lowest value it could be for now - tv = INFERRED_TRANSPORT_VERSION; - } - return tv; - } - @Override public void writeTo(StreamOutput out) throws IOException { clusterName.writeTo(out); @@ -1079,17 +1053,10 @@ public void writeTo(StreamOutput out) throws IOException { metadata.writeTo(out); routingTable.writeTo(out); nodes.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - out.writeMap(compatibilityVersions, StreamOutput::writeWriteable); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - clusterFeatures.writeTo(out); - } + out.writeMap(compatibilityVersions, StreamOutput::writeWriteable); + clusterFeatures.writeTo(out); blocks.writeTo(out); VersionedNamedWriteable.writeVersionedWritables(out, customs); - if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) { - out.writeVInt(-1); // used to be minimumMasterNodesOnPublishingMaster, which was used in 7.x for BWC with 6.x - } } private static 
class ClusterStateDiff implements Diff { @@ -1106,7 +1073,6 @@ private static class ClusterStateDiff implements Diff { private final Diff nodes; - @Nullable private final Diff> versions; private final Diff features; @@ -1142,26 +1108,13 @@ private static class ClusterStateDiff implements Diff { toVersion = in.readLong(); routingTable = RoutingTable.readDiffFrom(in); nodes = DiscoveryNodes.readDiffFrom(in, localNode); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) && in.readBoolean()) { - versions = DiffableUtils.readJdkMapDiff( - in, - DiffableUtils.getStringKeySerializer(), - COMPATIBILITY_VERSIONS_VALUE_SERIALIZER - ); - } else { - versions = null; // infer at application time - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - features = ClusterFeatures.readDiffFrom(in); - } else { - features = null; // fill in when nodes re-register with a master that understands features - } + boolean versionPresent = in.readBoolean(); + if (versionPresent == false) throw new IOException("ClusterStateDiff stream must have versions"); + versions = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), COMPATIBILITY_VERSIONS_VALUE_SERIALIZER); + features = ClusterFeatures.readDiffFrom(in); metadata = Metadata.readDiffFrom(in); blocks = ClusterBlocks.readDiffFrom(in); customs = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER); - if (in.getTransportVersion().before(TransportVersions.V_8_0_0)) { - in.readVInt(); // used to be minimumMasterNodesOnPublishingMaster, which was used in 7.x for BWC with 6.x - } } @Override @@ -1172,18 +1125,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(toVersion); routingTable.writeTo(out); nodes.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - out.writeOptionalWriteable(versions); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - 
features.writeTo(out); - } + out.writeBoolean(true); + versions.writeTo(out); + features.writeTo(out); metadata.writeTo(out); blocks.writeTo(out); customs.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) { - out.writeVInt(-1); // used to be minimumMasterNodesOnPublishingMaster, which was used in 7.x for BWC with 6.x - } } @Override @@ -1200,19 +1147,8 @@ public ClusterState apply(ClusterState state) { builder.version(toVersion); builder.routingTable(routingTable.apply(state.routingTable)); builder.nodes(nodes.apply(state.nodes)); - if (versions != null) { - builder.nodeIdsToCompatibilityVersions(this.versions.apply(state.compatibilityVersions)); - } else { - // infer the versions from discoverynodes for now - // leave mappings versions empty - builder.nodes() - .getNodes() - .values() - .forEach(n -> builder.putCompatibilityVersions(n.getId(), inferTransportVersion(n), Map.of())); - } - if (features != null) { - builder.nodeFeatures(this.features.apply(state.clusterFeatures)); - } + builder.nodeIdsToCompatibilityVersions(this.versions.apply(state.compatibilityVersions)); + builder.nodeFeatures(this.features.apply(state.clusterFeatures)); builder.metadata(metadata.apply(state.metadata)); builder.blocks(blocks.apply(state.blocks)); builder.customs(customs.apply(state.customs)); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateSupplier.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateSupplier.java new file mode 100644 index 0000000000000..61bb049ffd5c5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateSupplier.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster; + +import java.util.Optional; +import java.util.function.Function; +import java.util.function.Supplier; + +/** + * Utility to access {@link ClusterState} only when it is "ready", with a fallback if it's not. The definition of "ready" is left to the + * class implementations. + */ +public interface ClusterStateSupplier extends Supplier> { + default T withCurrentClusterState(Function clusterStateFunction, T fallbackIfNotReady) { + var x = get(); + return x.map(clusterStateFunction).orElse(fallbackIfNotReady); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/SafeClusterStateSupplier.java b/server/src/main/java/org/elasticsearch/cluster/SafeClusterStateSupplier.java new file mode 100644 index 0000000000000..b12ef3d78f864 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/SafeClusterStateSupplier.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster; + +import java.util.Optional; + +import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; + +/** + * Utility to access {@link ClusterState} only when it is "ready", where "ready" means that we received a first clusterChanged event + * with no global block of type {@code STATE_NOT_RECOVERED_BLOCK} + * This guarantees that: + * - the initial cluster state has been set (see + * {@link org.elasticsearch.cluster.service.ClusterApplierService#setInitialState(ClusterState)}); + * - the initial recovery process has completed. + */ +public class SafeClusterStateSupplier implements ClusterStateSupplier, ClusterStateListener { + private volatile ClusterState currentClusterState; + + @Override + public void clusterChanged(ClusterChangedEvent event) { + // In this default implementation, "ready" is really "is cluster state available", which after the initial recovery it should be. + // If you need a different condition, feel free to add a different implementation of ClusterStateSupplier + if (isInitialized() || event.state().blocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) == false) { + currentClusterState = event.state(); + } + } + + private boolean isInitialized() { + return currentClusterState != null; + } + + @Override + public Optional get() { + return Optional.ofNullable(currentClusterState); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java index 2247d7a92cf41..5f499634fdbaa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java @@ -8,8 +8,6 @@ */ package org.elasticsearch.cluster.coordination; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.io.stream.StreamInput; @@ -17,7 +15,6 @@ import org.elasticsearch.transport.TransportRequest; import java.io.IOException; -import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -72,21 +69,8 @@ public JoinRequest( public JoinRequest(StreamInput in) throws IOException { super(in); sourceNode = new DiscoveryNode(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - compatibilityVersions = CompatibilityVersions.readVersion(in); - } else { - // there's a 1-1 mapping from Version to TransportVersion before 8.8.0 - // no known mapping versions here - compatibilityVersions = new CompatibilityVersions( - TransportVersion.fromId(sourceNode.getPre811VersionId().getAsInt()), - Map.of() - ); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - features = in.readCollectionAsSet(StreamInput::readString); - } else { - features = Set.of(); - } + compatibilityVersions = CompatibilityVersions.readVersion(in); + features = in.readCollectionAsSet(StreamInput::readString); minimumTerm = in.readLong(); optionalJoin = Optional.ofNullable(in.readOptionalWriteable(Join::new)); } @@ -95,12 +79,8 @@ public JoinRequest(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); sourceNode.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - compatibilityVersions.writeTo(out); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - out.writeCollection(features, StreamOutput::writeString); - } + compatibilityVersions.writeTo(out); + out.writeCollection(features, StreamOutput::writeString); out.writeLong(minimumTerm); out.writeOptionalWriteable(optionalJoin.orElse(null)); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputation.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputation.java index d82dcbac17c21..3846f7f9e5740 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputation.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputation.java @@ -49,6 +49,16 @@ public void onNewInput(T input) { } } + /** + * enqueues {@code input} if {@code expectedLatestKnownInput} is the latest known input. + * Neither of the parameters can be null. + */ + protected boolean compareAndEnqueue(T expectedLatestKnownInput, T input) { + assert expectedLatestKnownInput != null; + assert input != null; + return enqueuedInput.compareAndSet(Objects.requireNonNull(expectedLatestKnownInput), Objects.requireNonNull(input)); + } + /** * @return {@code false} iff there are no active/enqueued computations */ @@ -67,7 +77,7 @@ protected boolean isFresh(T input) { /** * Process the given input. * - * @param input the value that was last received by {@link #onNewInput} before invocation. + * @param input the value that was last received by {@link #onNewInput} or {@link #compareAndEnqueue} before invocation. 
*/ protected abstract void processInput(T input); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index aeedbb56b9df2..9de95804b49b2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -26,11 +26,18 @@ public record DesiredBalance( long lastConvergedIndex, Map assignments, - Map weightsPerNode + Map weightsPerNode, + ComputationFinishReason finishReason ) { + enum ComputationFinishReason { + CONVERGED, + YIELD_TO_NEW_INPUT, + STOP_EARLY + } + public DesiredBalance(long lastConvergedIndex, Map assignments) { - this(lastConvergedIndex, assignments, Map.of()); + this(lastConvergedIndex, assignments, Map.of(), ComputationFinishReason.CONVERGED); } public static final DesiredBalance INITIAL = new DesiredBalance(-1, Map.of()); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java index 56c48492a2051..42240a996c531 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java @@ -38,6 +38,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import java.util.function.LongSupplier; import java.util.function.Predicate; import static java.util.stream.Collectors.toUnmodifiableSet; @@ -49,8 +50,8 @@ public class DesiredBalanceComputer { private static final Logger logger = LogManager.getLogger(DesiredBalanceComputer.class); - private final ThreadPool threadPool; private final ShardsAllocator 
delegateAllocator; + private final LongSupplier timeSupplierMillis; // stats protected final MeanMetric iterations = new MeanMetric(); @@ -63,12 +64,28 @@ public class DesiredBalanceComputer { Setting.Property.NodeScope ); + public static final Setting MAX_BALANCE_COMPUTATION_TIME_DURING_INDEX_CREATION_SETTING = Setting.timeSetting( + "cluster.routing.allocation.desired_balance.max_balance_computation_time_during_index_creation", + TimeValue.timeValueSeconds(1), + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + private TimeValue progressLogInterval; + private long maxBalanceComputationTimeDuringIndexCreationMillis; public DesiredBalanceComputer(ClusterSettings clusterSettings, ThreadPool threadPool, ShardsAllocator delegateAllocator) { - this.threadPool = threadPool; + this(clusterSettings, delegateAllocator, threadPool::relativeTimeInMillis); + } + + DesiredBalanceComputer(ClusterSettings clusterSettings, ShardsAllocator delegateAllocator, LongSupplier timeSupplierMillis) { this.delegateAllocator = delegateAllocator; + this.timeSupplierMillis = timeSupplierMillis; clusterSettings.initializeAndWatch(PROGRESS_LOG_INTERVAL_SETTING, value -> this.progressLogInterval = value); + clusterSettings.initializeAndWatch( + MAX_BALANCE_COMPUTATION_TIME_DURING_INDEX_CREATION_SETTING, + value -> this.maxBalanceComputationTimeDuringIndexCreationMillis = value.millis() + ); } public DesiredBalance compute( @@ -77,7 +94,6 @@ public DesiredBalance compute( Queue> pendingDesiredBalanceMoves, Predicate isFresh ) { - if (logger.isTraceEnabled()) { logger.trace( "Recomputing desired balance for [{}]: {}, {}, {}, {}", @@ -97,9 +113,10 @@ public DesiredBalance compute( final var changes = routingAllocation.changes(); final var ignoredShards = getIgnoredShardsWithDiscardedAllocationStatus(desiredBalanceInput.ignoredShards()); final var clusterInfoSimulator = new ClusterInfoSimulator(routingAllocation); + DesiredBalance.ComputationFinishReason finishReason = 
DesiredBalance.ComputationFinishReason.CONVERGED; if (routingNodes.size() == 0) { - return new DesiredBalance(desiredBalanceInput.index(), Map.of()); + return new DesiredBalance(desiredBalanceInput.index(), Map.of(), Map.of(), finishReason); } // we assume that all ongoing recoveries will complete @@ -263,11 +280,12 @@ public DesiredBalance compute( final int iterationCountReportInterval = computeIterationCountReportInterval(routingAllocation); final long timeWarningInterval = progressLogInterval.millis(); - final long computationStartedTime = threadPool.relativeTimeInMillis(); + final long computationStartedTime = timeSupplierMillis.getAsLong(); long nextReportTime = computationStartedTime + timeWarningInterval; int i = 0; boolean hasChanges = false; + boolean assignedNewlyCreatedPrimaryShards = false; while (true) { if (hasChanges) { // Not the first iteration, so every remaining unassigned shard has been ignored, perhaps due to throttling. We must bring @@ -293,6 +311,15 @@ public DesiredBalance compute( for (final var shardRouting : routingNode) { if (shardRouting.initializing()) { hasChanges = true; + if (shardRouting.primary() + && shardRouting.unassignedInfo() != null + && shardRouting.unassignedInfo().reason() == UnassignedInfo.Reason.INDEX_CREATED) { + // TODO: we could include more cases that would cause early publishing of desired balance in case of a long + // computation. 
e.g.: + // - unassigned search replicas in case the shard has no assigned shard replicas + // - other reasons for an unassigned shard such as NEW_INDEX_RESTORED + assignedNewlyCreatedPrimaryShards = true; + } clusterInfoSimulator.simulateShardStarted(shardRouting); routingNodes.startShard(shardRouting, changes, 0L); } @@ -301,14 +328,14 @@ public DesiredBalance compute( i++; final int iterations = i; - final long currentTime = threadPool.relativeTimeInMillis(); + final long currentTime = timeSupplierMillis.getAsLong(); final boolean reportByTime = nextReportTime <= currentTime; final boolean reportByIterationCount = i % iterationCountReportInterval == 0; if (reportByTime || reportByIterationCount) { nextReportTime = currentTime + timeWarningInterval; } - if (hasChanges == false) { + if (hasChanges == false && hasEnoughIterations(i)) { logger.debug( "Desired balance computation for [{}] converged after [{}] and [{}] iterations", desiredBalanceInput.index(), @@ -324,9 +351,25 @@ public DesiredBalance compute( "Desired balance computation for [{}] interrupted after [{}] and [{}] iterations as newer cluster state received. " + "Publishing intermediate desired balance and restarting computation", desiredBalanceInput.index(), + TimeValue.timeValueMillis(currentTime - computationStartedTime).toString(), + i + ); + finishReason = DesiredBalance.ComputationFinishReason.YIELD_TO_NEW_INPUT; + break; + } + + if (assignedNewlyCreatedPrimaryShards + && currentTime - computationStartedTime >= maxBalanceComputationTimeDuringIndexCreationMillis) { + logger.info( + "Desired balance computation for [{}] interrupted after [{}] and [{}] iterations " + + "in order to not delay assignment of newly created index shards for more than [{}]. 
" + + "Publishing intermediate desired balance and restarting computation", + desiredBalanceInput.index(), + TimeValue.timeValueMillis(currentTime - computationStartedTime).toString(), i, - TimeValue.timeValueMillis(currentTime - computationStartedTime).toString() + TimeValue.timeValueMillis(maxBalanceComputationTimeDuringIndexCreationMillis).toString() ); + finishReason = DesiredBalance.ComputationFinishReason.STOP_EARLY; break; } @@ -368,7 +411,12 @@ public DesiredBalance compute( } long lastConvergedIndex = hasChanges ? previousDesiredBalance.lastConvergedIndex() : desiredBalanceInput.index(); - return new DesiredBalance(lastConvergedIndex, assignments, routingNodes.getBalanceWeightStatsPerNode()); + return new DesiredBalance(lastConvergedIndex, assignments, routingNodes.getBalanceWeightStatsPerNode(), finishReason); + } + + // visible for testing + boolean hasEnoughIterations(int currentIteration) { + return true; } private static Map collectShardAssignments(RoutingNodes routingNodes) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 4171100191211..0cfb3af87f012 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -136,7 +136,16 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { ) ); computationsExecuted.inc(); - if (isFresh(desiredBalanceInput)) { + + if (currentDesiredBalance.finishReason() == DesiredBalance.ComputationFinishReason.STOP_EARLY) { + logger.debug( + "Desired balance computation for [{}] terminated early with partial result, scheduling reconciliation", + index + ); + submitReconcileTask(currentDesiredBalance); + var newInput = 
DesiredBalanceInput.create(indexGenerator.incrementAndGet(), desiredBalanceInput.routingAllocation()); + desiredBalanceComputation.compareAndEnqueue(desiredBalanceInput, newInput); + } else if (isFresh(desiredBalanceInput)) { logger.debug("Desired balance computation for [{}] is completed, scheduling reconciliation", index); computationsConverged.inc(); submitReconcileTask(currentDesiredBalance); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index 79daceaf11851..f946402d1fa09 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -136,11 +136,13 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) allocation.decision( Decision.NO, "cancel_allocation_command", - "can't cancel " + shardId + ", failed to find it on node " + discoNode + "can't cancel [" + index + "][" + shardId + "], failed to find it on node " + discoNode ) ); } - throw new IllegalArgumentException("[cancel_allocation] can't cancel " + shardId + ", failed to find it on node " + discoNode); + throw new IllegalArgumentException( + "[cancel_allocation] can't cancel [" + index + "][" + shardId + "], failed to find it on node " + discoNode + ); } if (shardRouting.primary() && allowPrimary == false) { if ((shardRouting.initializing() && shardRouting.relocatingNodeId() != null) == false) { @@ -151,9 +153,11 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) allocation.decision( Decision.NO, "cancel_allocation_command", - "can't cancel " + "can't cancel [" + + index + + "][" + shardId - + " on node " + + "] on node " + discoNode + ", shard is primary and " + 
shardRouting.state().name().toLowerCase(Locale.ROOT) @@ -161,9 +165,11 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) ); } throw new IllegalArgumentException( - "[cancel_allocation] can't cancel " + "[cancel_allocation] can't cancel [" + + index + + "][" + shardId - + " on node " + + "] on node " + discoNode + ", shard is primary and " + shardRouting.state().name().toLowerCase(Locale.ROOT) @@ -178,7 +184,7 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) allocation.decision( Decision.YES, "cancel_allocation_command", - "shard " + shardId + " on node " + discoNode + " can be cancelled" + "shard [" + index + "][" + shardId + "] on node " + discoNode + " can be cancelled" ) ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index 4295a6178168a..b937ebdc33091 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -141,11 +141,21 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) if (explain) { return new RerouteExplanation( this, - allocation.decision(Decision.NO, "move_allocation_command", "shard " + shardId + " has not been started") + allocation.decision( + Decision.NO, + "move_allocation_command", + "shard [" + index + "][" + shardId + "] has not been started" + ) ); } throw new IllegalArgumentException( - "[move_allocation] can't move " + shardId + ", shard is not started (state = " + shardRouting.state() + "]" + "[move_allocation] can't move [" + + index + + "][" + + shardId + + "], shard is not started (state = " + + shardRouting.state() + + "]" ); } @@ -155,9 +165,11 @@ public RerouteExplanation execute(RoutingAllocation 
allocation, boolean explain) return new RerouteExplanation(this, decision); } throw new IllegalArgumentException( - "[move_allocation] can't move " + "[move_allocation] can't move [" + + index + + "][" + shardId - + ", from " + + "], from " + fromDiscoNode + ", to " + toDiscoNode @@ -182,10 +194,12 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) if (explain) { return new RerouteExplanation( this, - allocation.decision(Decision.NO, "move_allocation_command", "shard " + shardId + " not found") + allocation.decision(Decision.NO, "move_allocation_command", "shard [" + index + "][" + shardId + "] not found") ); } - throw new IllegalArgumentException("[move_allocation] can't move " + shardId + ", failed to find it on node " + fromDiscoNode); + throw new IllegalArgumentException( + "[move_allocation] can't move [" + index + "][" + shardId + "], failed to find it on node " + fromDiscoNode + ); } return new RerouteExplanation(this, decision); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index ea6f93cbc881b..35ab786c96e21 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -25,9 +25,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import java.util.Map; @@ -72,25 +70,6 @@ public class DiskThresholdDecider extends AllocationDecider { public static final String NAME = 
"disk_threshold"; - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - public static final Setting ENABLE_FOR_SINGLE_DATA_NODE = Setting.boolSetting( - "cluster.routing.allocation.disk.watermark.enable_for_single_data_node", - true, - new Setting.Validator<>() { - @Override - public void validate(Boolean value) { - if (value == Boolean.FALSE) { - throw new SettingsException( - "setting [{}=false] is not allowed, only true is valid", - ENABLE_FOR_SINGLE_DATA_NODE.getKey() - ); - } - } - }, - Setting.Property.NodeScope, - Setting.Property.DeprecatedWarning - ); - public static final Setting SETTING_IGNORE_DISK_WATERMARKS = Setting.boolSetting( "index.routing.allocation.disk.watermark.ignore", false, @@ -102,9 +81,6 @@ public void validate(Boolean value) { public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings) { this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings); - // get deprecation warnings. - boolean enabledForSingleDataNode = ENABLE_FOR_SINGLE_DATA_NODE.get(settings); - assert enabledForSingleDataNode; } /** diff --git a/server/src/main/java/org/elasticsearch/common/lucene/BytesRefs.java b/server/src/main/java/org/elasticsearch/common/lucene/BytesRefs.java index ff8af9b80edcc..ed88c3a5a9c91 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/BytesRefs.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/BytesRefs.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.UnicodeUtil; public class BytesRefs { @@ -56,6 +57,25 @@ public static BytesRef checkIndexableLength(BytesRef input) { return input; } + /** + * Converts a given string to a {@link BytesRef} object with an exactly sized byte array. + *

+ * This method alternative method to the standard {@link BytesRef} constructor's allocates the + * exact byte array size needed for the string. This is done by parsing the UTF-16 string two + * times the first to estimate the array length and the second to copy the string value inside + * the array. + *

+ * + * @param s the input string to convert + * @return a BytesRef object representing the input string + */ + public static BytesRef toExactSizedBytesRef(String s) { + int l = s.length(); + byte[] b = new byte[UnicodeUtil.calcUTF16toUTF8Length(s, 0, l)]; + UnicodeUtil.UTF16toUTF8(s, 0, l, b); + return new BytesRef(b, 0, b.length); + } + /** * Produces a UTF-string prefix of the input BytesRef. If the prefix cutoff would produce * ill-formed UTF, it falls back to the hexadecimal representation. @@ -70,5 +90,4 @@ private static String safeStringPrefix(BytesRef input, int prefixLength) { return prefix.toString(); } } - } diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 60626b9e2375f..c65f75df663d2 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -599,6 +599,15 @@ void validate(final String key, final Settings settings, final boolean validateV ); } } + + if (setting instanceof SecureSetting && settings.hasValue(key)) { + throw new IllegalArgumentException( + "Setting [" + + key + + "] is a secure setting" + + " and must be stored inside the Elasticsearch keystore, but was found inside elasticsearch.yml" + ); + } } if (validateValue) { setting.get(settings); diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 7bb78eabc8727..a9a9411de8e1f 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -51,7 +51,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import 
org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider; -import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; @@ -221,6 +220,7 @@ public void apply(Settings value, Settings current, Settings previous) { DataStreamAutoShardingService.CLUSTER_AUTO_SHARDING_MAX_WRITE_THREADS, DataStreamAutoShardingService.CLUSTER_AUTO_SHARDING_MIN_WRITE_THREADS, DesiredBalanceComputer.PROGRESS_LOG_INTERVAL_SETTING, + DesiredBalanceComputer.MAX_BALANCE_COMPUTATION_TIME_DURING_INDEX_CREATION_SETTING, DesiredBalanceReconciler.UNDESIRED_ALLOCATIONS_LOG_INTERVAL_SETTING, DesiredBalanceReconciler.UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING, BreakerSettings.CIRCUIT_BREAKER_LIMIT_SETTING, @@ -271,7 +271,6 @@ public void apply(Settings value, Settings current, Settings previous) { ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING, - DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE, DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING, DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_MAX_HEADROOM_SETTING, DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 67ac55f7b19eb..36ca2df08724d 100644 --- 
a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -82,21 +82,14 @@ public boolean exists(Settings.Builder builder) { public T get(Settings settings) { checkDeprecation(settings); final SecureSettings secureSettings = settings.getSecureSettings(); - if (secureSettings == null || secureSettings.getSettingNames().contains(getKey()) == false) { - if (super.exists(settings)) { - throw new IllegalArgumentException( - "Setting [" - + getKey() - + "] is a secure setting" - + " and must be stored inside the Elasticsearch keystore, but was found inside elasticsearch.yml" - ); - } + String key = getKey(); + if (secureSettings == null || secureSettings.getSettingNames().contains(key) == false) { return getFallback(settings); } try { return getSecret(secureSettings); } catch (GeneralSecurityException e) { - throw new RuntimeException("failed to read secure setting " + getKey(), e); + throw new RuntimeException("failed to read secure setting " + key, e); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 70c4a3ac213a2..023f6fcea0bfe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -9,6 +9,8 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReader; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; @@ -41,6 +43,7 @@ import java.util.stream.Stream; public class ObjectMapper extends Mapper { + private static final Logger logger = LogManager.getLogger(ObjectMapper.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final 
FeatureFlag SUB_OBJECTS_AUTO_FEATURE_FLAG = new FeatureFlag("sub_objects_auto"); @@ -679,6 +682,13 @@ private static Map buildMergedMappers( // replaces an existing one. if (objectMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { putMergedMapper(mergedMappers, mergeWithMapper); + } else if (isConflictingDynamicMapping(objectMergeContext, mergeWithMapper, mergeIntoMapper)) { + logger.trace( + "ignoring conflicting dynamic mapping update for field={} current_type={} new_type={}", + mergeIntoMapper.fullPath(), + mergeIntoMapper.typeName(), + mergeWithMapper.typeName() + ); } else { putMergedMapper(mergedMappers, mergeIntoMapper.merge(mergeWithMapper, objectMergeContext)); } @@ -687,6 +697,22 @@ private static Map buildMergedMappers( return Map.copyOf(mergedMappers); } + /* + * We're ignoring the field if a dynamic mapping update tries to define a conflicting field type. + * This is caused by another index request with a different value racing to update the mappings. + * After updating the mappings, the index request will be re-tried and sees the updated mappings for this field. + * The updated mappings will then be taken into account when parsing the document + * (for example by coercing the value, ignore_malformed values, or failing the index request due to a type conflict). 
+ */ + private static boolean isConflictingDynamicMapping( + MapperMergeContext objectMergeContext, + Mapper mergeWithMapper, + Mapper mergeIntoMapper + ) { + return objectMergeContext.getMapperBuilderContext().getMergeReason().isAutoUpdate() + && mergeIntoMapper.typeName().equals(mergeWithMapper.typeName()) == false; + } + private static void putMergedMapper(Map mergedMappers, @Nullable Mapper merged) { if (merged != null) { mergedMappers.put(merged.leafName(), merged); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 809532c0e8f5a..1c61dcec906a1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1904,9 +1904,7 @@ protected Object parseSourceValue(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - throw new IllegalArgumentException( - "Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations" - ); + return DocValueFormat.DENSE_VECTOR; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorDVLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorDVLeafFieldData.java index 23d2c4b554d85..e44202d353629 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorDVLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorDVLeafFieldData.java @@ -10,9 +10,14 @@ package org.elasticsearch.index.mapper.vectors; import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import 
org.apache.lucene.index.LeafReader; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -23,8 +28,12 @@ import org.elasticsearch.script.field.vectors.ByteBinaryDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.ByteKnnDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.KnnDenseVectorDocValuesField; +import org.elasticsearch.search.DocValueFormat; import java.io.IOException; +import java.util.Arrays; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; final class VectorDVLeafFieldData implements LeafFieldData { @@ -76,4 +85,119 @@ public DocValuesScriptFieldFactory getScriptFieldFactory(String name) { } } + @Override + public FormattedDocValues getFormattedValues(DocValueFormat format) { + int dims = elementType == ElementType.BIT ? this.dims / Byte.SIZE : this.dims; + return switch (elementType) { + case BYTE, BIT -> new FormattedDocValues() { + private byte[] vector = new byte[dims]; + private ByteVectorValues byteVectorValues; // use when indexed + private KnnVectorValues.DocIndexIterator iterator; // use when indexed + private BinaryDocValues binary; // use when not indexed + { + try { + if (indexed) { + byteVectorValues = reader.getByteVectorValues(field); + iterator = (byteVectorValues == null) ? 
null : byteVectorValues.iterator(); + } else { + binary = DocValues.getBinary(reader, field); + } + } catch (IOException e) { + throw new IllegalStateException("Cannot load doc values", e); + } + + } + + @Override + public boolean advanceExact(int docId) throws IOException { + if (indexed) { + if (iteratorAdvanceExact(iterator, docId) == false) { + return false; + } + vector = byteVectorValues.vectorValue(iterator.index()); + } else { + if (binary == null || binary.advanceExact(docId) == false) { + return false; + } + BytesRef ref = binary.binaryValue(); + System.arraycopy(ref.bytes, ref.offset, vector, 0, dims); + } + return true; + } + + @Override + public int docValueCount() { + return 1; + } + + public Object nextValue() { + Byte[] vectorValue = new Byte[dims]; + for (int i = 0; i < dims; i++) { + vectorValue[i] = vector[i]; + } + return vectorValue; + } + }; + case FLOAT -> new FormattedDocValues() { + float[] vector = new float[dims]; + private FloatVectorValues floatVectorValues; // use when indexed + private KnnVectorValues.DocIndexIterator iterator; // use when indexed + private BinaryDocValues binary; // use when not indexed + { + try { + if (indexed) { + floatVectorValues = reader.getFloatVectorValues(field); + iterator = (floatVectorValues == null) ? 
null : floatVectorValues.iterator(); + } else { + binary = DocValues.getBinary(reader, field); + } + } catch (IOException e) { + throw new IllegalStateException("Cannot load doc values", e); + } + + } + + @Override + public boolean advanceExact(int docId) throws IOException { + if (indexed) { + if (iteratorAdvanceExact(iterator, docId) == false) { + return false; + } + vector = floatVectorValues.vectorValue(iterator.index()); + } else { + if (binary == null || binary.advanceExact(docId) == false) { + return false; + } + BytesRef ref = binary.binaryValue(); + VectorEncoderDecoder.decodeDenseVector(indexVersion, ref, vector); + } + return true; + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public Object nextValue() { + return Arrays.copyOf(vector, vector.length); + } + }; + }; + } + + private static boolean iteratorAdvanceExact(KnnVectorValues.DocIndexIterator iterator, int docId) throws IOException { + if (iterator == null) return false; + int currentDoc = iterator.docID(); + if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { + return false; + } else if (docId > currentDoc) { + currentDoc = iterator.advance(docId); + if (currentDoc != docId) { + return false; + } + } + return true; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java index 033151da362ef..b2a816c9e5690 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.geometry.Geometry; @@ -44,10 +43,6 @@ * Base {@link QueryBuilder} that builds a Geometry Query */ public abstract class AbstractGeometryQueryBuilder> extends AbstractQueryBuilder { - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [geo_shape] queries. " - + "The type should no longer be specified in the [indexed_shape] section."; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(AbstractGeometryQueryBuilder.class); - public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; @@ -59,7 +54,6 @@ public abstract class AbstractGeometryQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { return this; } - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. 
- */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - /** builds the appropriate lucene shape query */ protected abstract Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType); diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 5df63687e1786..f00e6904feac7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -216,12 +216,12 @@ public final int hashCode() { * @return the same input object or a {@link BytesRef} representation if input was of type string */ static Object maybeConvertToBytesRef(Object obj) { - if (obj instanceof String) { - return BytesRefs.checkIndexableLength(BytesRefs.toBytesRef(obj)); - } else if (obj instanceof CharBuffer) { - return BytesRefs.checkIndexableLength(new BytesRef((CharBuffer) obj)); - } else if (obj instanceof BigInteger) { - return BytesRefs.toBytesRef(obj); + if (obj instanceof String v) { + return BytesRefs.checkIndexableLength(BytesRefs.toExactSizedBytesRef(v)); + } else if (obj instanceof CharBuffer v) { + return BytesRefs.checkIndexableLength(new BytesRef(v)); + } else if (obj instanceof BigInteger v) { + return BytesRefs.toBytesRef(v); } return obj; } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 1560004b13785..1ef233f952e77 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -231,23 +231,11 @@ public CombinedFieldsQueryBuilder zeroTermsQuery(ZeroTermsQueryOption zeroTermsQ return this; } - public ZeroTermsQueryOption zeroTermsQuery() { - return zeroTermsQuery; - } - 
public CombinedFieldsQueryBuilder autoGenerateSynonymsPhraseQuery(boolean enable) { this.autoGenerateSynonymsPhraseQuery = enable; return this; } - /** - * Whether phrase queries should be automatically generated for multi terms synonyms. - * Defaults to {@code true}. - */ - public boolean autoGenerateSynonymsPhraseQuery() { - return autoGenerateSynonymsPhraseQuery; - } - private static void validateFieldBoost(float boost) { if (boost < 1.0f) { throw new IllegalArgumentException("[" + NAME + "] requires field boosts to be >= 1.0"); diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index b0d3065ba3a3f..e054f17ef64d6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -154,11 +154,4 @@ public String getTierPreference() { return tier.isEmpty() == false ? tier : null; } - /** - * We're holding on to the index tier in the context as otherwise we'd need - * to re-parse it from the index settings when evaluating the _tier field. 
- */ - public String tier() { - return tier; - } } diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index e5c1b16b65059..fb96f85835548 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -68,7 +68,7 @@ public String fieldName() { } @Override - protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) { if (getMappedFields(context, fieldName).isEmpty()) { return new MatchNoneQueryBuilder("The \"" + getName() + "\" query was rewritten to a \"match_none\" query."); } else { diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 16e21f1d5650e..ada82b7a37c91 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -78,56 +78,6 @@ public FuzzyQueryBuilder(String fieldName, String value) { this(fieldName, (Object) value); } - /** - * Constructs a new fuzzy query. - * - * @param fieldName The name of the field - * @param value The value of the text - */ - public FuzzyQueryBuilder(String fieldName, int value) { - this(fieldName, (Object) value); - } - - /** - * Constructs a new fuzzy query. - * - * @param fieldName The name of the field - * @param value The value of the text - */ - public FuzzyQueryBuilder(String fieldName, long value) { - this(fieldName, (Object) value); - } - - /** - * Constructs a new fuzzy query. 
- * - * @param fieldName The name of the field - * @param value The value of the text - */ - public FuzzyQueryBuilder(String fieldName, float value) { - this(fieldName, (Object) value); - } - - /** - * Constructs a new fuzzy query. - * - * @param fieldName The name of the field - * @param value The value of the text - */ - public FuzzyQueryBuilder(String fieldName, double value) { - this(fieldName, (Object) value); - } - - /** - * Constructs a new fuzzy query. - * - * @param fieldName The name of the field - * @param value The value of the text - */ - public FuzzyQueryBuilder(String fieldName, boolean value) { - this(fieldName, (Object) value); - } - /** * Constructs a new fuzzy query. * @@ -193,19 +143,11 @@ public FuzzyQueryBuilder prefixLength(int prefixLength) { return this; } - public int prefixLength() { - return this.prefixLength; - } - public FuzzyQueryBuilder maxExpansions(int maxExpansions) { this.maxExpansions = maxExpansions; return this; } - public int maxExpansions() { - return this.maxExpansions; - } - public FuzzyQueryBuilder transpositions(boolean transpositions) { this.transpositions = transpositions; return this; @@ -220,10 +162,6 @@ public FuzzyQueryBuilder rewrite(String rewrite) { return this; } - public String rewrite() { - return this.rewrite; - } - @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 7d773ce6c3fd2..e91be82730222 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.GeoShapeQueryable; @@ -45,10 +44,6 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "geo_bounding_box"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoBoundingBoxQueryBuilder.class); - - private static final String TYPE_PARAMETER_DEPRECATION_MESSAGE = "Deprecated parameter [type] used, it should no longer be specified."; - /** * The default value for ignore_unmapped. */ @@ -204,13 +199,6 @@ public GeoBoundingBoxQueryBuilder setValidationMethod(GeoValidationMethod method return this; } - /** - * Returns geo coordinate validation method to use. - * */ - public GeoValidationMethod getValidationMethod() { - return this.validationMethod; - } - /** Returns the name of the field to base the bounding box computation on. */ public String fieldName() { return this.fieldName; @@ -226,15 +214,6 @@ public GeoBoundingBoxQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { return this; } - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. 
- */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - QueryValidationException checkLatLon() { if (GeoValidationMethod.isIgnoreMalformed(validationMethod)) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 479534321ba30..14271fb01696a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -183,21 +183,11 @@ public GeoDistanceQueryBuilder geoDistance(GeoDistance geoDistance) { return this; } - /** Returns geo distance calculation type to use. */ - public GeoDistance geoDistance() { - return this.geoDistance; - } - /** Set validation method for geo coordinates. */ public void setValidationMethod(GeoValidationMethod method) { this.validationMethod = method; } - /** Returns validation method for geo coordinates. */ - public GeoValidationMethod getValidationMethod() { - return this.validationMethod; - } - /** * Sets whether the query builder should ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if @@ -208,15 +198,6 @@ public GeoDistanceQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { return this; } - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. 
- */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { MappedFieldType fieldType = context.getFieldType(fieldName); diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index d2459c72eebed..575c15d5c063e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -126,11 +126,6 @@ public GeoPolygonQueryBuilder setValidationMethod(GeoValidationMethod method) { return this; } - /** Returns the validation method to use for geo coordinates. */ - public GeoValidationMethod getValidationMethod() { - return this.validationMethod; - } - /** * Sets whether the query builder should ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if @@ -141,15 +136,6 @@ public GeoPolygonQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { return this; } - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. 
- */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { MappedFieldType fieldType = context.getFieldType(fieldName); diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index f7639a55dd9b0..845023d2d832d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; @@ -132,7 +131,7 @@ public GeoShapeQueryBuilder relation(ShapeRelation relation) { * @return this */ public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) { - if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { + if (strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { throw new IllegalArgumentException( "strategy [" + strategy.getStrategyName() @@ -217,11 +216,7 @@ protected boolean parseXContentField(XContentParser parser) throws IOException { } else if (STRATEGY_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { String strategyName = parser.text(); strategy = SpatialStrategy.fromString(strategyName); - if (strategy == null) { - throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]"); - } else { - this.strategy = strategy; - } + this.strategy = strategy; return true; } return false; diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java 
b/server/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java index f101ee456c8c4..2c678719a25d4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java @@ -62,15 +62,4 @@ public static boolean isCoerce(GeoValidationMethod method) { return method == GeoValidationMethod.COERCE; } - /** Returns validation method corresponding to given coerce and ignoreMalformed values. */ - public static GeoValidationMethod infer(boolean coerce, boolean ignoreMalformed) { - if (coerce) { - return GeoValidationMethod.COERCE; - } else if (ignoreMalformed) { - return GeoValidationMethod.IGNORE_MALFORMED; - } else { - return GeoValidationMethod.STRICT; - } - } - } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 806f28d72647a..edf32caaee4ac 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -46,7 +46,6 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { public static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped"); - public static final QueryBuilder DEFAULT_INNER_HIT_QUERY = new MatchAllQueryBuilder(); public static final ParseField COLLAPSE_FIELD = new ParseField("collapse"); public static final ParseField FIELD_FIELD = new ParseField("field"); diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java index bd26bb21e1f01..0731eef5bfe35 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalQueryBuilder.java @@ 
-47,10 +47,6 @@ public IntervalQueryBuilder(StreamInput in) throws IOException { this.sourceProvider = in.readNamedWriteable(IntervalsSourceProvider.class); } - public String getField() { - return field; - } - public IntervalsSourceProvider getSourceProvider() { return sourceProvider; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java index d28fe251806f4..094627ef0711b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchBoolPrefixQueryBuilder.java @@ -103,21 +103,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeOptionalString(fuzzyRewrite); } - /** Returns the field name used in this query. */ - public String fieldName() { - return this.fieldName; - } - /** Returns the value used in this query. */ public Object value() { return this.value; } - /** Get the analyzer to use, if previously set, otherwise {@code null} */ - public String analyzer() { - return this.analyzer; - } - /** * Explicitly set the analyzer to use. Defaults to use explicit mapping * config for the field, or, if not set, the default search analyzer. @@ -136,11 +126,6 @@ public MatchBoolPrefixQueryBuilder operator(Operator operator) { return this; } - /** Returns the operator to use in a boolean query.*/ - public Operator operator() { - return this.operator; - } - /** Sets optional minimumShouldMatch value to apply to the query */ public MatchBoolPrefixQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; @@ -194,13 +179,6 @@ public MatchBoolPrefixQueryBuilder maxExpansions(int maxExpansions) { return this; } - /** - * Get the (optional) number of term expansions when using fuzzy or prefix type query. 
- */ - public int maxExpansions() { - return this.maxExpansions; - } - /** * Sets whether transpositions are supported in fuzzy queries.

* The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein @@ -224,14 +202,6 @@ public MatchBoolPrefixQueryBuilder fuzzyRewrite(String fuzzyRewrite) { return this; } - /** - * Get the fuzzy_rewrite parameter - * @see #fuzzyRewrite(String) - */ - public String fuzzyRewrite() { - return this.fuzzyRewrite; - } - @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java index dae2444573124..da1a760d1414b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java @@ -102,11 +102,6 @@ public MatchPhrasePrefixQueryBuilder analyzer(String analyzer) { return this; } - /** Get the analyzer to use, if previously set, otherwise {@code null} */ - public String analyzer() { - return this.analyzer; - } - /** Sets a slop factor for phrase queries */ public MatchPhrasePrefixQueryBuilder slop(int slop) { if (slop < 0) { diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 7e644a8800bbd..30b6134650524 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.lucene.search.XMoreLikeThis; import 
org.elasticsearch.common.lucene.uid.Versions; @@ -69,9 +68,6 @@ */ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "more_like_this"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MoreLikeThisQueryBuilder.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [more_like_this] " - + "queries. The type should no longer be specified in the [like] and [unlike] sections."; public static final int DEFAULT_MAX_QUERY_TERMS = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS; public static final int DEFAULT_MIN_TERM_FREQ = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ; @@ -105,7 +101,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder perFieldAnalyzer() { - return perFieldAnalyzer; - } - /** * Sets the analyzer(s) to use at any given field. */ @@ -312,10 +303,6 @@ public Item versionType(VersionType versionType) { return this; } - XContentType xContentType() { - return xContentType; - } - /** * Convert this to a {@link TermVectorsRequest} for fetching the terms of the document. */ @@ -541,10 +528,6 @@ public MoreLikeThisQueryBuilder unlike(String[] unlikeTexts) { return this; } - public String[] unlikeTexts() { - return unlikeTexts; - } - /** * Sets the documents from which the terms should not be selected from. */ @@ -553,10 +536,6 @@ public MoreLikeThisQueryBuilder unlike(Item[] unlikeItems) { return this; } - public Item[] unlikeItems() { - return unlikeItems; - } - /** * Sets the maximum number of query terms that will be included in any generated query. * Defaults to {@code 25}. @@ -569,10 +548,6 @@ public MoreLikeThisQueryBuilder maxQueryTerms(int maxQueryTerms) { return this; } - public int maxQueryTerms() { - return maxQueryTerms; - } - /** * The frequency below which terms will be ignored in the source doc. The default * frequency is {@code 2}. 
@@ -582,10 +557,6 @@ public MoreLikeThisQueryBuilder minTermFreq(int minTermFreq) { return this; } - public int minTermFreq() { - return minTermFreq; - } - /** * Sets the frequency at which words will be ignored which do not occur in at least this * many docs. Defaults to {@code 5}. @@ -608,10 +579,6 @@ public MoreLikeThisQueryBuilder maxDocFreq(int maxDocFreq) { return this; } - public int maxDocFreq() { - return maxDocFreq; - } - /** * Sets the minimum word length below which words will be ignored. Defaults * to {@code 0}. @@ -634,10 +601,6 @@ public MoreLikeThisQueryBuilder maxWordLength(int maxWordLength) { return this; } - public int maxWordLength() { - return maxWordLength; - } - /** * Set the set of stopwords. *

@@ -700,10 +663,6 @@ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { return this; } - public float boostTerms() { - return boostTerms; - } - /** * Whether to include the input documents. Defaults to {@code false} */ @@ -724,10 +683,6 @@ public MoreLikeThisQueryBuilder failOnUnsupportedField(boolean fail) { return this; } - public boolean failOnUnsupportedField() { - return failOnUnsupportedField; - } - @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index 17e651ab24696..94a52e57d257e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -124,7 +124,7 @@ public enum Type implements Writeable { */ BOOL_PREFIX(MatchQueryParser.Type.BOOLEAN_PREFIX, 1.0f, new ParseField("bool_prefix")); - private MatchQueryParser.Type matchQueryType; + private final MatchQueryParser.Type matchQueryType; private final float tieBreaker; private final ParseField parseField; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 564050c972b92..66f1c9a74d4c2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -341,14 +341,6 @@ public static SpanOrQueryBuilder spanOrQuery(SpanQueryBuilder initialClause) { return new SpanOrQueryBuilder(initialClause); } - /** Creates a new {@code span_within} builder. - * @param big the big clause, it must enclose {@code little} for a match. - * @param little the little clause, it must be contained within {@code big} for a match. 
- */ - public static SpanWithinQueryBuilder spanWithinQuery(SpanQueryBuilder big, SpanQueryBuilder little) { - return new SpanWithinQueryBuilder(big, little); - } - /** * Creates a new {@code span_containing} builder. * @param big the big clause, it must enclose {@code little} for a match. @@ -660,36 +652,12 @@ public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String inde return builder; } - /** - * A filter to filter indexed shapes that are contained by a shape - * - * @param name The shape field name - * @param shape Shape to use in the filter - */ - public static GeoShapeQueryBuilder geoWithinQuery(String name, Geometry shape) throws IOException { - GeoShapeQueryBuilder builder = geoShapeQuery(name, shape); - builder.relation(ShapeRelation.WITHIN); - return builder; - } - public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId) { GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId); builder.relation(ShapeRelation.WITHIN); return builder; } - /** - * A filter to filter indexed shapes that are not intersection with the query shape - * - * @param name The shape field name - * @param shape Shape to use in the filter - */ - public static GeoShapeQueryBuilder geoDisjointQuery(String name, Geometry shape) throws IOException { - GeoShapeQueryBuilder builder = geoShapeQuery(name, shape); - builder.relation(ShapeRelation.DISJOINT); - return builder; - } - public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId) { GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId); builder.relation(ShapeRelation.DISJOINT); diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/GaussDecayFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/GaussDecayFunctionBuilder.java index 7d9bcbd4aef14..6dc74ca571647 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/GaussDecayFunctionBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/functionscore/GaussDecayFunctionBuilder.java @@ -14,13 +14,11 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ParseField; import java.io.IOException; public class GaussDecayFunctionBuilder extends DecayFunctionBuilder { public static final String NAME = "gauss"; - public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME); public static final ScoreFunctionParser PARSER = new DecayFunctionParser<>(GaussDecayFunctionBuilder::new); public static final DecayFunction GAUSS_DECAY_FUNCTION = new GaussScoreFunction(); diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java index 06e4a6a3bbbd5..6d4b2dd4ab1f5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java @@ -111,14 +111,6 @@ public RandomScoreFunctionBuilder setField(String field) { return this; } - /** - * Get the field to use for random number generation. 
- * @see #setField(String) - */ - public String getField() { - return field; - } - @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 8b9610a52cc3d..9b58c37fff27e 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -316,7 +316,7 @@ protected SystemIndexDescriptor( if (prior.primaryIndex.equals(primaryIndex) == false) { throw new IllegalArgumentException("primary index must be the same"); } - if (prior.aliasName.equals(aliasName) == false) { + if (Objects.equals(prior.aliasName, aliasName) == false) { throw new IllegalArgumentException("alias name must be the same"); } } @@ -512,16 +512,21 @@ public MappingsVersion getMappingsVersion() { * @param cause the action being attempted that triggered the check. Used in the error message. * @return the standardized error message */ - public String getMinimumMappingsVersionMessage(String cause) { + public String getMinimumMappingsVersionMessage(String cause, MappingsVersion requiredMinimumMappingVersion) { Objects.requireNonNull(cause); final MappingsVersion actualMinimumMappingsVersion = priorSystemIndexDescriptors.isEmpty() ? getMappingsVersion() : priorSystemIndexDescriptors.get(priorSystemIndexDescriptors.size() - 1).mappingsVersion; return Strings.format( - "[%s] failed - system index [%s] requires all data and master nodes to have mappings versions at least of version [%s]", + "[%s] failed - requested creation of system index [%s] with version [%s], while this cluster minimum supported version is " + + "[%s]. 
For the cluster to support version [%s], ensure that the system index descriptor for [%s] includes a prior " + + "definition for that version.", cause, this.getPrimaryIndex(), - actualMinimumMappingsVersion + requiredMinimumMappingVersion, + actualMinimumMappingsVersion, + requiredMinimumMappingVersion, + this.getPrimaryIndex() ); } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 24b305e382160..cd92f38e65152 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -178,6 +178,15 @@ default Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { return model; } + /** + * Update a chat completion model's max tokens if required. The default behaviour is to just return the model. + * @param model The original model without updated embedding details + * @return The model with updated chat completion details + */ + default Model updateModelWithChatCompletionDetails(Model model) { + return model; + } + /** * Defines the version required across all clusters to use this service * @return {@link TransportVersion} specifying the version diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndexModeStatsActionType.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndexModeStatsActionType.java new file mode 100644 index 0000000000000..f64bbb6ec3d50 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndexModeStatsActionType.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.monitor.metrics; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; + +public final class IndexModeStatsActionType extends ActionType { + public static final IndexModeStatsActionType TYPE = new IndexModeStatsActionType(); + + private IndexModeStatsActionType() { + super("cluster:monitor/nodes/index_mode_stats"); + } + + public static final class StatsRequest extends BaseNodesRequest { + public StatsRequest(String[] nodesIds) { + super(nodesIds); + } + + public StatsRequest(DiscoveryNode... 
concreteNodes) { + super(concreteNodes); + } + } + + public static final class StatsResponse extends BaseNodesResponse { + StatsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + assert false : "must be local"; + throw new UnsupportedOperationException("must be local"); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + assert false : "must be local"; + throw new UnsupportedOperationException("must be local"); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + assert false : "must be local"; + throw new UnsupportedOperationException("must be local"); + } + + public Map stats() { + final Map stats = new EnumMap<>(IndexMode.class); + for (IndexMode mode : IndexMode.values()) { + stats.put(mode, new IndexStats()); + } + for (NodeResponse node : getNodes()) { + for (Map.Entry e : node.stats.entrySet()) { + stats.get(e.getKey()).add(e.getValue()); + } + } + return stats; + } + } + + public static final class NodeRequest extends TransportRequest { + NodeRequest() { + + } + + NodeRequest(StreamInput in) throws IOException { + super(in); + } + } + + public static class NodeResponse extends BaseNodeResponse { + private final Map stats; + + NodeResponse(DiscoveryNode node, Map stats) { + super(node); + this.stats = stats; + } + + NodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + super(in, node); + stats = in.readMap(IndexMode::readFrom, IndexStats::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(stats, (o, m) -> IndexMode.writeTo(m, o), (o, s) -> s.writeTo(o)); + } + } + + public static class TransportAction extends TransportNodesAction { + private final IndicesService indicesService; + + @Inject + public TransportAction( + ClusterService clusterService, + 
TransportService transportService, + ActionFilters actionFilters, + IndicesService indicesService + ) { + super( + TYPE.name(), + clusterService, + transportService, + actionFilters, + NodeRequest::new, + transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT) + ); + this.indicesService = indicesService; + } + + @Override + protected StatsResponse newResponse(StatsRequest request, List nodeResponses, List failures) { + return new StatsResponse(ClusterName.DEFAULT, nodeResponses, failures); + } + + @Override + protected NodeRequest newNodeRequest(StatsRequest request) { + return new NodeRequest(); + } + + @Override + protected NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeResponse(in, node); + } + + @Override + protected NodeResponse nodeOperation(NodeRequest request, Task task) { + return new NodeResponse(clusterService.localNode(), IndicesMetrics.getStatsWithoutCache(indicesService)); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndexStats.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndexStats.java new file mode 100644 index 0000000000000..5d16150516465 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndexStats.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.monitor.metrics; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.shard.IndexingStats; + +import java.io.IOException; + +public final class IndexStats implements Writeable { + int numIndices = 0; + long numDocs = 0; + long numBytes = 0; + SearchStats.Stats search = new SearchStats().getTotal(); + IndexingStats.Stats indexing = new IndexingStats().getTotal(); + + IndexStats() { + + } + + IndexStats(StreamInput in) throws IOException { + this.numIndices = in.readVInt(); + this.numDocs = in.readVLong(); + this.numBytes = in.readVLong(); + this.search = SearchStats.Stats.readStats(in); + this.indexing = new IndexingStats.Stats(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(numIndices); + out.writeVLong(numDocs); + out.writeVLong(numBytes); + search.writeTo(out); + indexing.writeTo(out); + } + + void add(IndexStats other) { + this.numIndices += other.numIndices; + this.numDocs += other.numDocs; + this.numBytes += other.numBytes; + this.search.add(other.search); + this.indexing.add(other.indexing); + } + + public int numIndices() { + return numIndices; + } + + public long numDocs() { + return numDocs; + } + + public long numBytes() { + return numBytes; + } +} diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java index ba67bc03e1441..99011d101d342 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java @@ -18,11 +18,9 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexService; -import 
org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -193,12 +191,36 @@ protected void doClose() throws IOException { }); } - static class IndexStats { - int numIndices = 0; - long numDocs = 0; - long numBytes = 0; - SearchStats.Stats search = new SearchStats().getTotal(); - IndexingStats.Stats indexing = new IndexingStats().getTotal(); + static Map getStatsWithoutCache(IndicesService indicesService) { + Map stats = new EnumMap<>(IndexMode.class); + for (IndexMode mode : IndexMode.values()) { + stats.put(mode, new IndexStats()); + } + for (IndexService indexService : indicesService) { + for (IndexShard indexShard : indexService) { + if (indexShard.isSystem()) { + continue; // skip system indices + } + final ShardRouting shardRouting = indexShard.routingEntry(); + final IndexMode indexMode = indexShard.indexSettings().getMode(); + final IndexStats indexStats = stats.get(indexMode); + try { + if (shardRouting.primary() && shardRouting.recoverySource() == null) { + if (shardRouting.shardId().id() == 0) { + indexStats.numIndices++; + } + final DocsStats docStats = indexShard.docStats(); + indexStats.numDocs += docStats.getCount(); + indexStats.numBytes += docStats.getTotalSizeInBytes(); + indexStats.indexing.add(indexShard.indexingStats().getTotal()); + } + indexStats.search.add(indexShard.searchStats().getTotal()); + } catch (IllegalIndexShardStateException | AlreadyClosedException ignored) { + // ignored + } + } + } + return stats; } private static class IndicesStatsCache extends SingleObjectCache> { @@ -219,41 +241,9 @@ private static class IndicesStatsCache extends SingleObjectCache 
internalGetIndicesStats() { - Map stats = new EnumMap<>(IndexMode.class); - for (IndexMode mode : IndexMode.values()) { - stats.put(mode, new IndexStats()); - } - for (IndexService indexService : indicesService) { - for (IndexShard indexShard : indexService) { - if (indexShard.isSystem()) { - continue; // skip system indices - } - final ShardRouting shardRouting = indexShard.routingEntry(); - final IndexMode indexMode = indexShard.indexSettings().getMode(); - final IndexStats indexStats = stats.get(indexMode); - try { - if (shardRouting.primary() && shardRouting.recoverySource() == null) { - if (shardRouting.shardId().id() == 0) { - indexStats.numIndices++; - } - final DocsStats docStats = indexShard.docStats(); - indexStats.numDocs += docStats.getCount(); - indexStats.numBytes += docStats.getTotalSizeInBytes(); - indexStats.indexing.add(indexShard.indexingStats().getTotal()); - } - indexStats.search.add(indexShard.searchStats().getTotal()); - } catch (IllegalIndexShardStateException | AlreadyClosedException ignored) { - // ignored - } - } - } - return stats; - } - @Override protected Map refresh() { - return refresh ? internalGetIndicesStats() : getNoRefresh(); + return refresh ? 
getStatsWithoutCache(indicesService) : getNoRefresh(); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 7b82481d3d283..de3fd390ec86d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -38,9 +38,7 @@ import java.io.IOException; import java.util.ArrayDeque; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.function.Supplier; @@ -142,19 +140,10 @@ private static Exception parseFailureException(Exception e) { static class ChunkHandler implements BaseRestHandler.RequestBodyChunkConsumer { - private final boolean allowExplicitIndex; private final RestRequest request; - private final Map stringDeduplicator = new HashMap<>(); - private final String defaultIndex; - private final String defaultRouting; - private final FetchSourceContext defaultFetchSourceContext; - private final String defaultPipeline; - private final boolean defaultListExecutedPipelines; - private final Boolean defaultRequireAlias; - private final boolean defaultRequireDataStream; - private final BulkRequestParser parser; private final Supplier handlerSupplier; + private final BulkRequestParser.IncrementalParser parser; private IncrementalBulkService.Handler handler; private volatile RestChannel restChannel; @@ -164,17 +153,22 @@ static class ChunkHandler implements BaseRestHandler.RequestBodyChunkConsumer { private final ArrayList> items = new ArrayList<>(4); ChunkHandler(boolean allowExplicitIndex, RestRequest request, Supplier handlerSupplier) { - this.allowExplicitIndex = allowExplicitIndex; this.request = request; - this.defaultIndex = request.param("index"); - this.defaultRouting = request.param("routing"); - this.defaultFetchSourceContext = 
FetchSourceContext.parseFromRestRequest(request); - this.defaultPipeline = request.param("pipeline"); - this.defaultListExecutedPipelines = request.paramAsBoolean("list_executed_pipelines", false); - this.defaultRequireAlias = request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, false); - this.defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); - this.parser = new BulkRequestParser(true, request.getRestApiVersion()); this.handlerSupplier = handlerSupplier; + this.parser = new BulkRequestParser(true, request.getRestApiVersion()).incrementalParser( + request.param("index"), + request.param("routing"), + FetchSourceContext.parseFromRestRequest(request), + request.param("pipeline"), + request.paramAsBoolean(DocWriteRequest.REQUIRE_ALIAS, false), + request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false), + request.paramAsBoolean("list_executed_pipelines", false), + allowExplicitIndex, + request.getXContentType(), + (indexRequest, type) -> items.add(indexRequest), + items::add, + items::add + ); } @Override @@ -210,23 +204,7 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo // TODO: Check that the behavior here vs. 
globalRouting, globalPipeline, globalRequireAlias, globalRequireDatsStream in // BulkRequest#add is fine - bytesConsumed = parser.incrementalParse( - data, - defaultIndex, - defaultRouting, - defaultFetchSourceContext, - defaultPipeline, - defaultRequireAlias, - defaultRequireDataStream, - defaultListExecutedPipelines, - allowExplicitIndex, - request.getXContentType(), - (request, type) -> items.add(request), - items::add, - items::add, - isLast == false, - stringDeduplicator - ); + bytesConsumed = parser.parse(data, isLast); bytesParsed += bytesConsumed; } catch (Exception e) { @@ -253,7 +231,7 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo items.clear(); handler.addItems(toPass, () -> Releasables.close(releasables), () -> request.contentStream().next()); } else { - assert releasables.isEmpty(); + Releasables.close(releasables); request.contentStream().next(); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index f87dcc737f938..f85b89f774477 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -47,7 +47,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Tuple sourceTuple = restRequest.contentOrSourceParam(); - SimulatePipelineRequest request = new SimulatePipelineRequest(sourceTuple.v2(), sourceTuple.v1()); + SimulatePipelineRequest request = new SimulatePipelineRequest(sourceTuple.v2(), sourceTuple.v1(), restRequest.getRestApiVersion()); request.setId(restRequest.param("id")); request.setVerbose(restRequest.paramAsBoolean("verbose", false)); return channel -> client.admin().cluster().simulatePipeline(request, new 
RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 7828bb956a160..d1039a6ddc74e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -9,6 +9,10 @@ package org.elasticsearch.rest.action.search; +import org.elasticsearch.Build; +import org.elasticsearch.common.util.set.Sets; + +import java.util.Collections; import java.util.Set; /** @@ -22,9 +26,34 @@ private SearchCapabilities() {} private static final String RANGE_REGEX_INTERVAL_QUERY_CAPABILITY = "range_regexp_interval_queries"; /** Support synthetic source with `bit` type in `dense_vector` field when `index` is set to `false`. */ private static final String BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY = "bit_dense_vector_synthetic_source"; + /** Support Byte and Float with Bit dot product. */ + private static final String BYTE_FLOAT_BIT_DOT_PRODUCT_CAPABILITY = "byte_float_bit_dot_product"; + /** Support docvalue_fields parameter for `dense_vector` field. */ + private static final String DENSE_VECTOR_DOCVALUE_FIELDS = "dense_vector_docvalue_fields"; + /** Support transforming rank rrf queries to the corresponding rrf retriever. */ + private static final String TRANSFORM_RANK_RRF_TO_RETRIEVER = "transform_rank_rrf_to_retriever"; + /** Support kql query. 
*/ + private static final String KQL_QUERY_SUPPORTED = "kql_query"; + + public static final Set CAPABILITIES = capabilities(); + + private static Set capabilities() { + Set capabilities = Set.of( + RANGE_REGEX_INTERVAL_QUERY_CAPABILITY, + BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY, + BYTE_FLOAT_BIT_DOT_PRODUCT_CAPABILITY, + DENSE_VECTOR_DOCVALUE_FIELDS, + TRANSFORM_RANK_RRF_TO_RETRIEVER + ); + + if (Build.current().isSnapshot()) { + return Collections.unmodifiableSet(Sets.union(capabilities, snapshotBuildCapabilities())); + } + + return capabilities; + } - public static final Set CAPABILITIES = Set.of( - RANGE_REGEX_INTERVAL_QUERY_CAPABILITY, - BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY - ); + private static Set snapshotBuildCapabilities() { + return Set.of(KQL_QUERY_SUPPORTED); + } } diff --git a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java index 809e9811f3673..e773bceb5ec05 100644 --- a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java +++ b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java @@ -307,6 +307,87 @@ public interface DotProductInterface { double dotProduct(); } + public static class BitDotProduct extends DenseVectorFunction implements DotProductInterface { + private final byte[] byteQueryVector; + private final float[] floatQueryVector; + + public BitDotProduct(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) { + super(scoreScript, field); + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("cannot calculate bit dot product for non-bit vectors"); + } + int fieldDims = field.get().getDims(); + if (fieldDims != queryVector.length * Byte.SIZE && fieldDims != queryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. 
Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + queryVector.length + + "]." + ); + } + this.byteQueryVector = queryVector; + this.floatQueryVector = null; + } + + public BitDotProduct(ScoreScript scoreScript, DenseVectorDocValuesField field, List queryVector) { + super(scoreScript, field); + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("cannot calculate bit dot product for non-bit vectors"); + } + float[] floatQueryVector = new float[queryVector.size()]; + byte[] byteQueryVector = new byte[queryVector.size()]; + boolean isFloat = false; + for (int i = 0; i < queryVector.size(); i++) { + Number number = queryVector.get(i); + floatQueryVector[i] = number.floatValue(); + byteQueryVector[i] = number.byteValue(); + if (isFloat + || floatQueryVector[i] % 1.0f != 0.0f + || floatQueryVector[i] < Byte.MIN_VALUE + || floatQueryVector[i] > Byte.MAX_VALUE) { + isFloat = true; + } + } + int fieldDims = field.get().getDims(); + if (isFloat) { + this.floatQueryVector = floatQueryVector; + this.byteQueryVector = null; + if (fieldDims != floatQueryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. Must be [" + + fieldDims + + "] for float wise operations: provided [" + + floatQueryVector.length + + "]." + ); + } + } else { + this.floatQueryVector = null; + this.byteQueryVector = byteQueryVector; + if (fieldDims != byteQueryVector.length * Byte.SIZE && fieldDims != byteQueryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + byteQueryVector.length + + "]." + ); + } + } + } + + @Override + public double dotProduct() { + setNextVector(); + return byteQueryVector != null ? 
field.get().dotProduct(byteQueryVector) : field.get().dotProduct(floatQueryVector); + } + } + public static class ByteDotProduct extends ByteDenseVectorFunction implements DotProductInterface { public ByteDotProduct(ScoreScript scoreScript, DenseVectorDocValuesField field, List queryVector) { @@ -343,7 +424,16 @@ public static final class DotProduct { public DotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) { DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName); function = switch (field.getElementType()) { - case BYTE, BIT -> { + case BIT -> { + if (queryVector instanceof List) { + yield new BitDotProduct(scoreScript, field, (List) queryVector); + } else if (queryVector instanceof String s) { + byte[] parsedQueryVector = HexFormat.of().parseHex(s); + yield new BitDotProduct(scoreScript, field, parsedQueryVector); + } + throw new IllegalArgumentException("Unsupported input object for bit vectors: " + queryVector.getClass().getName()); + } + case BYTE -> { if (queryVector instanceof List) { yield new ByteDotProduct(scoreScript, field, (List) queryVector); } else if (queryVector instanceof String s) { diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitBinaryDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitBinaryDenseVector.java index 9c0b7ce2e5d6e..fecca9c1b3929 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitBinaryDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitBinaryDenseVector.java @@ -13,6 +13,10 @@ import java.util.List; +import static org.elasticsearch.simdvec.ESVectorUtil.andBitCount; +import static org.elasticsearch.simdvec.ESVectorUtil.ipByteBit; +import static org.elasticsearch.simdvec.ESVectorUtil.ipFloatBit; + public class BitBinaryDenseVector extends ByteBinaryDenseVector { public BitBinaryDenseVector(byte[] vectorValue, BytesRef docVector, int dims) { @@ -54,7 +58,11 @@ 
public double l2Norm(List queryVector) { @Override public int dotProduct(byte[] queryVector) { - throw new UnsupportedOperationException("dotProduct is not supported for bit vectors."); + if (queryVector.length == vectorValue.length) { + // assume that the query vector is a bit vector and do a bitwise AND + return andBitCount(vectorValue, queryVector); + } + return ipByteBit(queryVector, vectorValue); } @Override @@ -79,7 +87,7 @@ public double cosineSimilarity(List queryVector) { @Override public double dotProduct(float[] queryVector) { - throw new UnsupportedOperationException("dotProduct is not supported for bit vectors."); + return ipFloatBit(queryVector, vectorValue); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitKnnDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitKnnDenseVector.java index b0171325d4089..fcfc4546f6e73 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitKnnDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitKnnDenseVector.java @@ -11,6 +11,10 @@ import java.util.List; +import static org.elasticsearch.simdvec.ESVectorUtil.andBitCount; +import static org.elasticsearch.simdvec.ESVectorUtil.ipByteBit; +import static org.elasticsearch.simdvec.ESVectorUtil.ipFloatBit; + public class BitKnnDenseVector extends ByteKnnDenseVector { public BitKnnDenseVector(byte[] vector) { @@ -61,7 +65,11 @@ public double l2Norm(List queryVector) { @Override public int dotProduct(byte[] queryVector) { - throw new UnsupportedOperationException("dotProduct is not supported for bit vectors."); + if (queryVector.length == docVector.length) { + // assume that the query vector is a bit vector and do a bitwise AND + return andBitCount(docVector, queryVector); + } + return ipByteBit(queryVector, docVector); } @Override @@ -86,7 +94,7 @@ public double cosineSimilarity(List queryVector) { @Override public double dotProduct(float[] queryVector) { - 
throw new UnsupportedOperationException("dotProduct is not supported for bit vectors."); + return ipFloatBit(queryVector, docVector); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java index 8f13ada2fd604..7a6954334c829 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java @@ -21,7 +21,7 @@ public class ByteBinaryDenseVector implements DenseVector { public static final int MAGNITUDE_BYTES = 4; private final BytesRef docVector; - private final byte[] vectorValue; + protected final byte[] vectorValue; protected final int dims; private float[] floatDocVector; diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index f1d4f678c5fb9..bdefee988248f 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -168,6 +168,31 @@ public String toString() { } }; + DocValueFormat DENSE_VECTOR = DenseVectorDocValueFormat.INSTANCE; + + /** + * Singleton, stateless formatter, for dense vector values, no need to actually format anything + */ + class DenseVectorDocValueFormat implements DocValueFormat { + + public static final DocValueFormat INSTANCE = new DenseVectorDocValueFormat(); + + private DenseVectorDocValueFormat() {} + + @Override + public String getWriteableName() { + return "dense_vector"; + } + + @Override + public void writeTo(StreamOutput out) {} + + @Override + public String toString() { + return "dense_vector"; + } + }; + DocValueFormat BINARY = BinaryDocValueFormat.INSTANCE; /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java 
b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 0bb914a9dbf97..fd39a95bdb75d 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1020,6 +1020,7 @@ private void registerValueFormats() { registerValueFormat(DocValueFormat.IP.getWriteableName(), in -> DocValueFormat.IP); registerValueFormat(DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW); registerValueFormat(DocValueFormat.BINARY.getWriteableName(), in -> DocValueFormat.BINARY); + registerValueFormat(DocValueFormat.DENSE_VECTOR.getWriteableName(), in -> DocValueFormat.DENSE_VECTOR); registerValueFormat(DocValueFormat.UNSIGNED_LONG_SHIFTED.getWriteableName(), in -> DocValueFormat.UNSIGNED_LONG_SHIFTED); registerValueFormat(DocValueFormat.TIME_SERIES_ID.getWriteableName(), in -> DocValueFormat.TIME_SERIES_ID); registerValueFormat(TS_ROUTING_HASH_DOC_VALUE_FORMAT.getWriteableName(), in -> TS_ROUTING_HASH_DOC_VALUE_FORMAT); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 71de6a9bbb822..38cab1761d409 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -51,7 +52,12 @@ protected InternalAggregation(String name, Map metadata) { * Read from a stream. 
*/ protected InternalAggregation(StreamInput in) throws IOException { - name = in.readString(); + final String name = in.readString(); + if (in instanceof DelayableWriteable.Deduplicator d) { + this.name = d.deduplicate(name); + } else { + this.name = name; + } metadata = in.readGenericMap(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 7c7233b0eaa1d..e6c26c4278807 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -81,12 +81,7 @@ public final void collectBucket(LeafBucketCollector subCollector, int doc, long grow(bucketOrd + 1); int docCount = docCountProvider.getDocCount(doc); if (docCounts.increment(bucketOrd, docCount) == docCount) { - // We call the circuit breaker the time to time in order to give it a chance to check available - // memory in the parent breaker and break the execution if we are running out. 
To achieve that we - // are passing 0 as the estimated bytes every 1024 calls - if ((++callCount & 0x3FF) == 0) { - breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); - } + updateCircuitBreaker("allocated_buckets"); } subCollector.collect(doc, bucketOrd); } @@ -179,6 +174,7 @@ protected final IntFunction buildSubAggsForBuckets(long[] prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { + updateCircuitBreaker("building_sub_aggregation"); aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect); } return subAggsForBucketFunction(aggregations); @@ -415,4 +411,15 @@ protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException // Set LeafReaderContext to the doc_count provider docCountProvider.setLeafReaderContext(ctx); } + + /** + * This method calls the circuit breaker from time to time in order to give it a chance to check available + * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. 
+ * To achieve that, we are passing 0 as the estimated bytes every 1024 calls + */ + private void updateCircuitBreaker(String label) { + if ((++callCount & 0x3FF) == 0) { + breaker.addEstimateBytesAndMaybeBreak(0, label); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java index 039bd0dd67592..72e1db245338e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java @@ -61,7 +61,11 @@ public void collect(int doc, long bucket) throws IOException { if (values.advanceExact(doc)) { final DoubleHistogram state = getExistingOrNewHistogram(bigArrays(), bucket); for (int i = 0; i < values.docValueCount(); i++) { - state.recordValue(values.nextValue()); + final double value = values.nextValue(); + if (value < 0) { + throw new IllegalArgumentException("Negative values are not supported by HDR aggregation"); + } + state.recordValue(value); } } } @@ -74,8 +78,12 @@ protected LeafBucketCollector getLeafCollector(NumericDoubleValues values, LeafB @Override public void collect(int doc, long bucket) throws IOException { if (values.advanceExact(doc)) { + final double value = values.doubleValue(); + if (value < 0) { + throw new IllegalArgumentException("Negative values are not supported by HDR aggregation"); + } final DoubleHistogram state = getExistingOrNewHistogram(bigArrays(), bucket); - state.recordValue(values.doubleValue()); + state.recordValue(value); } } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCentroid.java index 05dd82fd59c4f..eb789bcdd8a74 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCentroid.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Function; /** * Serialization and merge logic for {@link GeoCentroidAggregator}. @@ -31,24 +30,13 @@ public abstract class InternalCentroid extends InternalAggregation implements CentroidAggregation { protected final SpatialPoint centroid; protected final long count; - private final FieldExtractor firstField; - private final FieldExtractor secondField; - - public InternalCentroid( - String name, - SpatialPoint centroid, - long count, - Map metadata, - FieldExtractor firstField, - FieldExtractor secondField - ) { + + public InternalCentroid(String name, SpatialPoint centroid, long count, Map metadata) { super(name, metadata); assert (centroid == null) == (count == 0); this.centroid = centroid; assert count >= 0; this.count = count; - this.firstField = firstField; - this.secondField = secondField; } protected abstract SpatialPoint centroidFromStream(StreamInput in) throws IOException; @@ -59,7 +47,7 @@ public InternalCentroid( * Read from a stream. 
*/ @SuppressWarnings("this-escape") - protected InternalCentroid(StreamInput in, FieldExtractor firstField, FieldExtractor secondField) throws IOException { + protected InternalCentroid(StreamInput in) throws IOException { super(in); count = in.readVLong(); if (in.readBoolean()) { @@ -67,8 +55,6 @@ protected InternalCentroid(StreamInput in, FieldExtractor firstField, FieldExtra } else { centroid = null; } - this.firstField = firstField; - this.secondField = secondField; } @Override @@ -110,11 +96,11 @@ public void accept(InternalAggregation aggregation) { if (centroidAgg.count > 0) { totalCount += centroidAgg.count; if (Double.isNaN(firstSum)) { - firstSum = centroidAgg.count * firstField.extractor.apply(centroidAgg.centroid); - secondSum = centroidAgg.count * secondField.extractor.apply(centroidAgg.centroid); + firstSum = centroidAgg.count * extractFirst(centroidAgg.centroid); + secondSum = centroidAgg.count * extractSecond(centroidAgg.centroid); } else { - firstSum += centroidAgg.count * firstField.extractor.apply(centroidAgg.centroid); - secondSum += centroidAgg.count * secondField.extractor.apply(centroidAgg.centroid); + firstSum += centroidAgg.count * extractFirst(centroidAgg.centroid); + secondSum += centroidAgg.count * extractSecond(centroidAgg.centroid); } } } @@ -126,6 +112,14 @@ public InternalAggregation get() { }; } + protected abstract String nameFirst(); + + protected abstract double extractFirst(SpatialPoint point); + + protected abstract String nameSecond(); + + protected abstract double extractSecond(SpatialPoint point); + @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return copyWith(centroid, samplingContext.scaleUp(count)); @@ -136,16 +130,6 @@ protected boolean mustReduceOnSingleInternalAgg() { return false; } - protected static class FieldExtractor { - private final String name; - private final Function extractor; - - public FieldExtractor(String name, Function extractor) { - this.name = name; - 
this.extractor = extractor; - } - } - protected abstract double extractDouble(String name); @Override @@ -174,8 +158,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th if (centroid != null) { builder.startObject(Fields.CENTROID.getPreferredName()); { - builder.field(firstField.name, firstField.extractor.apply(centroid)); - builder.field(secondField.name, secondField.extractor.apply(centroid)); + builder.field(nameFirst(), extractFirst(centroid)); + builder.field(nameSecond(), extractSecond(centroid)); } builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java index 10e301608ec2f..40c77b79f3ac6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java @@ -13,9 +13,6 @@ import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.support.SamplingContext; -import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.util.Map; @@ -26,21 +23,14 @@ public class InternalGeoCentroid extends InternalCentroid implements GeoCentroid { public InternalGeoCentroid(String name, SpatialPoint centroid, long count, Map metadata) { - super( - name, - centroid, - count, - metadata, - new FieldExtractor("lat", SpatialPoint::getY), - new FieldExtractor("lon", SpatialPoint::getX) - ); + super(name, centroid, count, metadata); } /** * Read from a stream. 
*/ public InternalGeoCentroid(StreamInput in) throws IOException { - super(in, new FieldExtractor("lat", SpatialPoint::getY), new FieldExtractor("lon", SpatialPoint::getX)); + super(in); } public static InternalGeoCentroid empty(String name, Map metadata) { @@ -84,12 +74,22 @@ protected InternalGeoCentroid copyWith(double firstSum, double secondSum, long t } @Override - public InternalAggregation finalizeSampling(SamplingContext samplingContext) { - return new InternalGeoCentroid(name, centroid, samplingContext.scaleUp(count), getMetadata()); + protected String nameFirst() { + return "lat"; } - static class Fields { - static final ParseField CENTROID_LAT = new ParseField("lat"); - static final ParseField CENTROID_LON = new ParseField("lon"); + @Override + protected double extractFirst(SpatialPoint point) { + return point.getY(); + } + + @Override + protected String nameSecond() { + return "lon"; + } + + @Override + protected double extractSecond(SpatialPoint point) { + return point.getX(); } } diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 9c96319136007..cb5e841a3df77 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1638,6 +1638,18 @@ private SearchSourceBuilder parseXContent( } knnSearch = knnBuilders.stream().map(knnBuilder -> knnBuilder.build(size())).collect(Collectors.toList()); + if (rankBuilder != null) { + if (retrieverBuilder != null) { + throw new IllegalArgumentException("Cannot specify both [rank] and [retriever]."); + } + RetrieverBuilder transformedRetriever = rankBuilder.toRetriever(this, clusterSupportsFeature); + if (transformedRetriever != null) { + this.retriever(transformedRetriever); + rankBuilder = null; + subSearchSourceBuilders.clear(); + knnSearch.clear(); + } + } 
searchUsageConsumer.accept(searchUsage); return this; } diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java b/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java index e8f6fdf9fde46..9176410f6ea35 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java @@ -16,11 +16,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.search.SearchService; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,6 +33,7 @@ import java.io.IOException; import java.util.List; import java.util.Objects; +import java.util.function.Predicate; /** * {@code RankBuilder} is used as a base class to manage input, parsing, and subsequent generation of appropriate contexts @@ -109,6 +115,16 @@ public int rankWindowSize() { */ public abstract RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from, Client client); + /** + * Transforms the specific rank builder (as parsed through SearchSourceBuilder) to the corresponding retriever. 
+ * This is used to ensure smooth deprecation of `rank` and `sub_searches` and move towards the retriever framework + */ + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_RELEVANCE) // remove for 10.0 once we remove support for the rank parameter in SearchAPI + @Nullable + public RetrieverBuilder toRetriever(SearchSourceBuilder searchSourceBuilder, Predicate clusterSupportsFeature) { + return null; + } + @Override public final boolean equals(Object obj) { if (this == obj) { diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java index 8ce8fc07f3acd..41673a0e7edb0 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java @@ -286,6 +286,10 @@ public int k() { return k; } + public int getNumCands() { + return numCands; + } + public QueryVectorBuilder getQueryVectorBuilder() { return queryVectorBuilder; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java index b7f7a02e3b07e..5785d076693e7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestParserTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.core.RestApiVersion; @@ -23,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; +import static org.hamcrest.Matchers.equalTo; + public class BulkRequestParserTests extends ESTestCase { @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) // Replace with just RestApiVersion.values() when V8 no 
longer exists @@ -30,6 +33,84 @@ public class BulkRequestParserTests extends ESTestCase { .filter(v -> v.compareTo(RestApiVersion.V_8) > 0) .toList(); + public void testParserCannotBeReusedAfterFailure() { + BytesArray request = new BytesArray(""" + { "index":{ }, "something": "unexpected" } + {} + """); + + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser.IncrementalParser incrementalParser = parser.incrementalParser( + null, + null, + null, + null, + null, + null, + null, + false, + XContentType.JSON, + (req, type) -> fail("expected failure before we got this far"), + req -> fail("expected failure before we got this far"), + req -> fail("expected failure before we got this far") + ); + + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> incrementalParser.parse(request, false)); + assertEquals("Malformed action/metadata line [1], expected END_OBJECT but found [FIELD_NAME]", ex.getMessage()); + + BytesArray valid = new BytesArray(""" + { "index":{ "_id": "bar" } } + {} + """); + expectThrows(AssertionError.class, () -> incrementalParser.parse(valid, false)); + } + + public void testIncrementalParsing() throws IOException { + ArrayList> indexRequests = new ArrayList<>(); + ArrayList> updateRequests = new ArrayList<>(); + ArrayList> deleteRequests = new ArrayList<>(); + + BulkRequestParser parser = new BulkRequestParser(randomBoolean(), RestApiVersion.current()); + BulkRequestParser.IncrementalParser incrementalParser = parser.incrementalParser( + null, + null, + null, + null, + null, + null, + null, + false, + XContentType.JSON, + (r, t) -> indexRequests.add(r), + updateRequests::add, + deleteRequests::add + ); + + BytesArray request = new BytesArray(""" + { "index":{ "_id": "bar", "pipeline": "foo" } } + { "field": "value"} + { "index":{ "require_alias": false } } + { "field": "value" } + { "update":{ "_id": "bus", "require_alias": true } } + { "doc": {"field": "value" }} 
+ { "delete":{ "_id": "baz" } } + { "index": { } } + { "field": "value"} + { "delete":{ "_id": "bop" } } + """); + + int consumed = 0; + for (int i = 0; i < request.length() - 1; ++i) { + consumed += incrementalParser.parse(request.slice(consumed, i - consumed + 1), false); + } + consumed += incrementalParser.parse(request.slice(consumed, request.length() - consumed), true); + assertThat(consumed, equalTo(request.length())); + + assertThat(indexRequests.size(), equalTo(3)); + assertThat(updateRequests.size(), equalTo(1)); + assertThat(deleteRequests.size(), equalTo(2)); + } + public void testIndexRequest() throws IOException { BytesArray request = new BytesArray(""" { "index":{ "_id": "bar" } } @@ -133,7 +214,7 @@ public void testUpdateRequest() throws IOException { }, req -> fail()); } - public void testBarfOnLackOfTrailingNewline() { + public void testBarfOnLackOfTrailingNewline() throws IOException { BytesArray request = new BytesArray(""" { "index":{ "_id": "bar" } } {}"""); @@ -157,6 +238,27 @@ public void testBarfOnLackOfTrailingNewline() { ) ); assertEquals("The bulk request must be terminated by a newline [\\n]", e.getMessage()); + + BulkRequestParser.IncrementalParser incrementalParser = parser.incrementalParser( + "foo", + null, + null, + null, + null, + null, + null, + false, + XContentType.JSON, + (req, type) -> {}, + req -> {}, + req -> {} + ); + + // Should not throw because not last + incrementalParser.parse(request, false); + + IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> incrementalParser.parse(request, true)); + assertEquals("The bulk request must be terminated by a newline [\\n]", e2.getMessage()); } public void testFailOnExplicitIndex() { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 032db4135aab7..b36b3af1ddb86 100644 --- 
a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -406,7 +406,7 @@ public void testInvalidDynamicTemplates() { IllegalArgumentException.class, () -> new BulkRequest().add(updateWithDynamicTemplates, null, XContentType.JSON) ); - assertThat(error.getMessage(), equalTo("Update request in line [2] does not accept dynamic_templates")); + assertThat(error.getMessage(), equalTo("Update request in line [1] does not accept dynamic_templates")); BytesArray invalidDynamicTemplates = new BytesArray(""" { "index":{"_index":"test","dynamic_templates":[]} diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index d8e3151adb61d..f8ecdbd062054 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -94,7 +94,7 @@ private AbstractSearchAsyncAction createAction( SearchResponse.Clusters.EMPTY ) { @Override - protected SearchPhase getNextPhase(final SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return null; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index a796827baa253..f655136cd4ba4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -139,7 +139,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void 
run() { @@ -255,7 +255,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void run() { @@ -359,7 +359,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void run() { @@ -488,7 +488,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void run() { @@ -600,7 +600,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void run() { @@ -680,7 +680,7 @@ protected void executePhaseOnShard( } @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { return new SearchPhase("test") { @Override public void run() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index e4284937474c7..6357155793fdf 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -204,7 +204,7 @@ public void sendExecuteQuery( null ) { @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + protected SearchPhase getNextPhase() { 
return new SearchPhase("test") { @Override public void run() { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputationTests.java index c15ca6d8205de..5ee57ebaa2c3c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ContinuousComputationTests.java @@ -21,6 +21,7 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -73,6 +74,68 @@ protected void processInput(Integer input) { assertTrue(Arrays.toString(valuePerThread) + " vs " + result.get(), Arrays.stream(valuePerThread).anyMatch(i -> i == result.get())); } + public void testCompareAndEnqueue() throws Exception { + final var initialInput = new Object(); + final var compareAndEnqueueCount = between(1, 10); + final var remaining = new AtomicInteger(compareAndEnqueueCount); + final var computationsExecuted = new AtomicInteger(); + final var result = new AtomicReference<>(); + final var computation = new ContinuousComputation<>(threadPool.generic()) { + @Override + protected void processInput(Object input) { + result.set(input); + if (remaining.decrementAndGet() >= 0) { + compareAndEnqueue(input, new Object()); + } + computationsExecuted.incrementAndGet(); + } + }; + computation.onNewInput(initialInput); + assertBusy(() -> assertFalse(computation.isActive())); + assertNotEquals(result.get(), initialInput); + assertEquals(computationsExecuted.get(), 1 + compareAndEnqueueCount); + } + + public void testCompareAndEnqueueSkipped() throws Exception { + final var barrier = new CyclicBarrier(2); 
+ final var computationsExecuted = new AtomicInteger(); + final var initialInput = new Object(); + final var conditionalInput = new Object(); + final var newInput = new Object(); + final var submitConditional = new AtomicBoolean(true); + final var result = new AtomicReference<>(); + + final var computation = new ContinuousComputation<>(threadPool.generic()) { + @Override + protected void processInput(Object input) { + assertNotEquals(input, conditionalInput); + safeAwait(barrier); // start + safeAwait(barrier); // continue + if (submitConditional.getAndSet(false)) { + compareAndEnqueue(input, conditionalInput); + } + result.set(input); + safeAwait(barrier); // finished + computationsExecuted.incrementAndGet(); + } + }; + computation.onNewInput(initialInput); + + safeAwait(barrier); // start + computation.onNewInput(newInput); + safeAwait(barrier); // continue + safeAwait(barrier); // finished + assertEquals(result.get(), initialInput); + + safeAwait(barrier); // start + safeAwait(barrier); // continue + safeAwait(barrier); // finished + + assertBusy(() -> assertFalse(computation.isActive())); + assertEquals(result.get(), newInput); + assertEquals(computationsExecuted.get(), 2); + } + public void testSkipsObsoleteValues() throws Exception { final var barrier = new CyclicBarrier(2); final Runnable await = () -> safeAwait(barrier); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index 7198170ab0c7c..56a687646b364 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -1210,7 +1210,12 @@ private void checkIterationLogging(int iterations, long eachIterationDuration, M var currentTime = new AtomicLong(0L); 
when(mockThreadPool.relativeTimeInMillis()).thenAnswer(invocation -> currentTime.addAndGet(eachIterationDuration)); - var desiredBalanceComputer = new DesiredBalanceComputer(createBuiltInClusterSettings(), mockThreadPool, new ShardsAllocator() { + // Some runs of this test try to simulate a long desired balance computation. Setting a high value on the following setting + // prevents interrupting a long computation. + var clusterSettings = createBuiltInClusterSettings( + Settings.builder().put(DesiredBalanceComputer.MAX_BALANCE_COMPUTATION_TIME_DURING_INDEX_CREATION_SETTING.getKey(), "2m").build() + ); + var desiredBalanceComputer = new DesiredBalanceComputer(clusterSettings, mockThreadPool, new ShardsAllocator() { @Override public void allocate(RoutingAllocation allocation) { final var unassignedIterator = allocation.routingNodes().unassigned().iterator(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index 739f81ed6d110..27c430131ff07 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; +import org.apache.logging.log4j.Level; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; @@ -52,6 +53,7 @@ import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.TestThreadPool; import java.util.List; @@ -59,11 +61,12 @@ import 
java.util.Queue; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; @@ -85,14 +88,19 @@ public class DesiredBalanceShardsAllocatorTests extends ESAllocationTestCase { public void testGatewayAllocatorPreemptsAllocation() { final var nodeId = randomFrom(LOCAL_NODE_ID, OTHER_NODE_ID); testAllocate( - (allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.initialize(nodeId, null, 0L, allocation.changes()), + (shardRouting, allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.initialize( + nodeId, + null, + 0L, + allocation.changes() + ), routingTable -> assertEquals(nodeId, routingTable.index("test-index").shard(0).primaryShard().currentNodeId()) ); } public void testGatewayAllocatorStillFetching() { testAllocate( - (allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.removeAndIgnore( + (shardRouting, allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.removeAndIgnore( UnassignedInfo.AllocationStatus.FETCHING_SHARD_DATA, allocation.changes() ), @@ -108,17 +116,14 @@ public void testGatewayAllocatorStillFetching() { } public void testGatewayAllocatorDoesNothing() { - testAllocate((allocation, unassignedAllocationHandler) -> {}, routingTable -> { + testAllocate((shardRouting, allocation, unassignedAllocationHandler) -> {}, routingTable -> { var shardRouting = routingTable.shardRoutingTable("test-index", 0).primaryShard(); assertTrue(shardRouting.assignedToNode());// assigned by a followup reconciliation assertThat(shardRouting.unassignedInfo().lastAllocationStatus(), 
equalTo(UnassignedInfo.AllocationStatus.NO_ATTEMPT)); }); } - public void testAllocate( - BiConsumer allocateUnassigned, - Consumer verifier - ) { + public void testAllocate(AllocateUnassignedHandler allocateUnassigned, Consumer verifier) { var deterministicTaskQueue = new DeterministicTaskQueue(); var threadPool = deterministicTaskQueue.getThreadPool(); @@ -295,7 +300,7 @@ public ClusterState apply(ClusterState clusterState, RerouteStrategy routingAllo var allocationService = new AllocationService( new AllocationDeciders(List.of()), createGatewayAllocator( - (allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.removeAndIgnore( + (shardRouting, allocation, unassignedAllocationHandler) -> unassignedAllocationHandler.removeAndIgnore( UnassignedInfo.AllocationStatus.NO_ATTEMPT, allocation.changes() ) @@ -336,6 +341,158 @@ protected long currentNanoTime() { } } + public void testIndexCreationInterruptsLongDesiredBalanceComputation() throws Exception { + var discoveryNode = newNode("node-0"); + var initialState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(discoveryNode).localNodeId(discoveryNode.getId()).masterNodeId(discoveryNode.getId())) + .build(); + final var ignoredIndexName = "index-ignored"; + + var threadPool = new TestThreadPool(getTestName()); + var time = new AtomicLong(threadPool.relativeTimeInMillis()); + var clusterService = ClusterServiceUtils.createClusterService(initialState, threadPool); + var allocationServiceRef = new SetOnce(); + var reconcileAction = new DesiredBalanceReconcilerAction() { + @Override + public ClusterState apply(ClusterState clusterState, RerouteStrategy routingAllocationAction) { + return allocationServiceRef.get().executeWithRoutingAllocation(clusterState, "reconcile", routingAllocationAction); + } + }; + + var gatewayAllocator = createGatewayAllocator((shardRouting, allocation, unassignedAllocationHandler) -> { + if (shardRouting.getIndexName().equals(ignoredIndexName)) { 
+ unassignedAllocationHandler.removeAndIgnore(UnassignedInfo.AllocationStatus.NO_ATTEMPT, allocation.changes()); + } + }); + var shardsAllocator = new ShardsAllocator() { + @Override + public void allocate(RoutingAllocation allocation) { + // simulate long computation + time.addAndGet(1_000); + var dataNodeId = allocation.nodes().getDataNodes().values().iterator().next().getId(); + var unassignedIterator = allocation.routingNodes().unassigned().iterator(); + while (unassignedIterator.hasNext()) { + unassignedIterator.next(); + unassignedIterator.initialize(dataNodeId, null, 0L, allocation.changes()); + } + allocation.routingNodes().setBalanceWeightStatsPerNode(Map.of()); + } + + @Override + public ShardAllocationDecision decideShardAllocation(ShardRouting shard, RoutingAllocation allocation) { + throw new AssertionError("only used for allocation explain"); + } + }; + + // Make sure the computation takes at least a few iterations, where each iteration takes 1s (see {@code #shardsAllocator.allocate}). + // By setting the following setting we ensure the desired balance computation will be interrupted early to not delay assigning + // newly created primary shards. This ensures that we hit a desired balance computation (3s) which is longer than the configured + // setting below. 
+ var clusterSettings = createBuiltInClusterSettings( + Settings.builder().put(DesiredBalanceComputer.MAX_BALANCE_COMPUTATION_TIME_DURING_INDEX_CREATION_SETTING.getKey(), "2s").build() + ); + final int minIterations = between(3, 10); + var desiredBalanceShardsAllocator = new DesiredBalanceShardsAllocator( + shardsAllocator, + threadPool, + clusterService, + new DesiredBalanceComputer(clusterSettings, shardsAllocator, time::get) { + @Override + public DesiredBalance compute( + DesiredBalance previousDesiredBalance, + DesiredBalanceInput desiredBalanceInput, + Queue> pendingDesiredBalanceMoves, + Predicate isFresh + ) { + return super.compute(previousDesiredBalance, desiredBalanceInput, pendingDesiredBalanceMoves, isFresh); + } + + @Override + boolean hasEnoughIterations(int currentIteration) { + return currentIteration >= minIterations; + } + }, + reconcileAction, + TelemetryProvider.NOOP + ); + var allocationService = createAllocationService(desiredBalanceShardsAllocator, gatewayAllocator); + allocationServiceRef.set(allocationService); + + var rerouteFinished = new CyclicBarrier(2); + // A mock cluster state update task for creating an index + class CreateIndexTask extends ClusterStateUpdateTask { + private final String indexName; + + private CreateIndexTask(String indexName) { + this.indexName = indexName; + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + var indexMetadata = createIndex(indexName); + var newState = ClusterState.builder(currentState) + .metadata(Metadata.builder(currentState.metadata()).put(indexMetadata, true)) + .routingTable( + RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY, currentState.routingTable()) + .addAsNew(indexMetadata) + ) + .build(); + return allocationService.reroute( + newState, + "test", + ActionTestUtils.assertNoFailureListener(response -> safeAwait(rerouteFinished)) + ); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + 
} + } + + final var computationInterruptedMessage = + "Desired balance computation for * interrupted * in order to not delay assignment of newly created index shards *"; + try { + // Create a new index which is not ignored and therefore must be considered when a desired balance + // computation takes longer than 2s. + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(0L)); + MockLog.assertThatLogger(() -> { + clusterService.submitUnbatchedStateUpdateTask("test", new CreateIndexTask("index-1")); + safeAwait(rerouteFinished); + assertThat(clusterService.state().getRoutingTable().index("index-1").primaryShardsUnassigned(), equalTo(0)); + }, + DesiredBalanceComputer.class, + new MockLog.SeenEventExpectation( + "Should log interrupted computation", + DesiredBalanceComputer.class.getCanonicalName(), + Level.INFO, + computationInterruptedMessage + ) + ); + assertBusy(() -> assertFalse(desiredBalanceShardsAllocator.getStats().computationActive())); + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(2L)); + // The computation should not get interrupted when the newly created index shard stays unassigned. 
+ MockLog.assertThatLogger(() -> { + clusterService.submitUnbatchedStateUpdateTask("test", new CreateIndexTask(ignoredIndexName)); + safeAwait(rerouteFinished); + assertThat(clusterService.state().getRoutingTable().index(ignoredIndexName).primaryShardsUnassigned(), equalTo(1)); + }, + DesiredBalanceComputer.class, + new MockLog.UnseenEventExpectation( + "Should log interrupted computation", + DesiredBalanceComputer.class.getCanonicalName(), + Level.INFO, + computationInterruptedMessage + ) + ); + assertBusy(() -> assertFalse(desiredBalanceShardsAllocator.getStats().computationActive())); + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(3L)); + } finally { + clusterService.close(); + terminate(threadPool); + } + } + public void testCallListenersOnlyAfterProducingFreshInput() throws InterruptedException { var reconciliations = new AtomicInteger(0); @@ -772,13 +929,30 @@ private static GatewayAllocator createGatewayAllocator() { return createGatewayAllocator(DesiredBalanceShardsAllocatorTests::initialize); } - private static void initialize(RoutingAllocation allocation, ExistingShardsAllocator.UnassignedAllocationHandler handler) { + private static void initialize( + ShardRouting shardRouting, + RoutingAllocation allocation, + ExistingShardsAllocator.UnassignedAllocationHandler handler + ) { handler.initialize(allocation.nodes().getLocalNodeId(), null, 0L, allocation.changes()); } - private static GatewayAllocator createGatewayAllocator( - BiConsumer allocateUnassigned - ) { + /** + * A helper interface to simplify creating a GatewayAllocator in the tests by only requiring + * an implementation for {@link org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator#allocateUnassigned}. 
+ */ + interface AllocateUnassignedHandler { + void handle( + ShardRouting shardRouting, + RoutingAllocation allocation, + ExistingShardsAllocator.UnassignedAllocationHandler unassignedAllocationHandler + ); + } + + /** + * Creates an implementation of GatewayAllocator that delegates its logic for allocating unassigned shards to the provided handler. + */ + private static GatewayAllocator createGatewayAllocator(AllocateUnassignedHandler allocateUnassigned) { return new GatewayAllocator() { @Override @@ -790,7 +964,7 @@ public void allocateUnassigned( RoutingAllocation allocation, UnassignedAllocationHandler unassignedAllocationHandler ) { - allocateUnassigned.accept(allocation, unassignedAllocationHandler); + allocateUnassigned.handle(shardRouting, allocation, unassignedAllocationHandler); } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 1392058f45384..e2d08a6884b68 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -45,8 +45,6 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; @@ -1070,9 +1068,6 @@ private void doTestWatermarksEnabledForSingleDataNode(boolean testMaxHeadroom) { ByteSizeValue.ofGb(110).toString() ); } - if (randomBoolean()) { - builder = 
builder.put(DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.getKey(), true); - } Settings diskSettings = builder.build(); final long totalBytes = testMaxHeadroom ? ByteSizeValue.ofGb(10000).getBytes() : 100; @@ -1151,10 +1146,6 @@ private void doTestWatermarksEnabledForSingleDataNode(boolean testMaxHeadroom) { + "on node, actual free: [20b], actual used: [80%]" ) ); - - if (DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.exists(diskSettings)) { - assertSettingDeprecationsAndWarnings(new Setting[] { DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE }); - } } public void testWatermarksEnabledForSingleDataNodeWithPercentages() { @@ -1165,25 +1156,6 @@ public void testWatermarksEnabledForSingleDataNodeWithMaxHeadroom() { doTestWatermarksEnabledForSingleDataNode(true); } - public void testSingleDataNodeDeprecationWarning() { - Settings settings = Settings.builder().put(DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.getKey(), false).build(); - - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> new DiskThresholdDecider(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) - ); - - assertThat( - e.getCause().getMessage(), - equalTo( - "setting [cluster.routing.allocation.disk.watermark.enable_for_single_data_node=false] is not allowed," - + " only true is valid" - ) - ); - - assertSettingDeprecationsAndWarnings(new Setting[] { DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE }); - } - private void doTestDiskThresholdWithSnapshotShardSizes(boolean testMaxHeadroom) { final long shardSizeInBytes = randomBoolean() ? (testMaxHeadroom ? 
ByteSizeValue.ofGb(99).getBytes() : 10L) // fits free space of node1 diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 9300aa992b687..d5fb33c9ec671 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -50,7 +50,10 @@ import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -74,7 +77,6 @@ import static org.hamcrest.Matchers.equalTo; public class LuceneTests extends ESTestCase { - private static final NamedWriteableRegistry EMPTY_REGISTRY = new NamedWriteableRegistry(Collections.emptyList()); public void testCleanIndex() throws IOException { MockDirectoryWrapper dir = newMockDirectory(); @@ -551,7 +553,6 @@ public void testSortFieldSerialization() throws IOException { Tuple sortFieldTuple = randomSortField(); SortField deserialized = copyInstance( sortFieldTuple.v1(), - EMPTY_REGISTRY, Lucene::writeSortField, Lucene::readSortField, TransportVersionUtils.randomVersion(random()) @@ -563,7 +564,6 @@ public void testSortValueSerialization() throws IOException { Object sortValue = randomSortValue(); Object deserialized = copyInstance( sortValue, - EMPTY_REGISTRY, Lucene::writeSortValue, Lucene::readSortValue, TransportVersionUtils.randomVersion(random()) @@ -571,6 +571,18 @@ public void testSortValueSerialization() throws IOException { assertEquals(sortValue, 
deserialized); } + private static T copyInstance(T original, Writeable.Writer writer, Writeable.Reader reader, TransportVersion version) + throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(version); + writer.write(output, original); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(version); + return reader.read(in); + } + } + } + public static Object randomSortValue() { return switch (randomIntBetween(0, 9)) { case 0 -> null; diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 8051437cf6e12..47026fe713c5c 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -1138,6 +1138,42 @@ public void testDiffSecureSettings() { assertTrue(diffed.isEmpty()); } + public void testValidateSecureSettingInsecureOverride() { + MockSecureSettings secureSettings = new MockSecureSettings(); + String settingName = "something.secure"; + secureSettings.setString(settingName, "secure"); + Settings settings = Settings.builder().put(settingName, "notreallysecure").setSecureSettings(secureSettings).build(); + + ClusterSettings clusterSettings = new ClusterSettings( + settings, + Collections.singleton(SecureSetting.secureString(settingName, null)) + ); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings, false)); + assertEquals( + e.getMessage(), + "Setting [something.secure] is a secure setting " + + "and must be stored inside the Elasticsearch keystore, but was found inside elasticsearch.yml" + ); + } + + public void testValidateSecureSettingInInsecureSettings() { + String settingName = "something.secure"; + Settings settings = Settings.builder().put(settingName, 
"notreallysecure").build(); + + ClusterSettings clusterSettings = new ClusterSettings( + settings, + Collections.singleton(SecureSetting.secureString(settingName, null)) + ); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings, false)); + assertEquals( + e.getMessage(), + "Setting [something.secure] is a secure setting " + + "and must be stored inside the Elasticsearch keystore, but was found inside elasticsearch.yml" + ); + } + public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 0).put(indexSettings)).build(); } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index cfdc5e6befaaa..5fefd92d176a5 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -473,13 +473,6 @@ public void testDiff() throws IOException { } } - public void testSecureSettingConflict() { - Setting setting = SecureSetting.secureString("something.secure", null); - Settings settings = Settings.builder().put("something.secure", "notreallysecure").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> setting.get(settings)); - assertTrue(e.getMessage().contains("must be stored inside the Elasticsearch keystore")); - } - public void testSecureSettingIllegalName() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> SecureSetting.secureString("*IllegalName", null)); assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 
3a68ad301ce5c..1f8a2a754428b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -9,13 +9,19 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.ESTestCase; import java.util.Collections; import java.util.Optional; import static org.elasticsearch.index.mapper.MapperService.MergeReason.INDEX_TEMPLATE; +import static org.elasticsearch.index.mapper.MapperService.MergeReason.MAPPING_AUTO_UPDATE; +import static org.elasticsearch.index.mapper.MapperService.MergeReason.MAPPING_AUTO_UPDATE_PREFLIGHT; import static org.elasticsearch.index.mapper.MapperService.MergeReason.MAPPING_UPDATE; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; public final class ObjectMapperMergeTests extends ESTestCase { @@ -318,6 +324,34 @@ public void testMergeSubobjectsFalseWithObject() { assertNotNull(parentMapper.getMapper("child.grandchild")); } + public void testConflictingDynamicUpdate() { + RootObjectMapper mergeInto = new RootObjectMapper.Builder("_doc", Optional.empty()).add( + new KeywordFieldMapper.Builder("http.status_code", IndexVersion.current()) + ).build(MapperBuilderContext.root(false, false)); + RootObjectMapper mergeWith = new RootObjectMapper.Builder("_doc", Optional.empty()).add( + new NumberFieldMapper.Builder( + "http.status_code", + NumberFieldMapper.NumberType.LONG, + ScriptCompiler.NONE, + false, + true, + IndexVersion.current(), + null + ) + ).build(MapperBuilderContext.root(false, false)); + + MapperService.MergeReason autoUpdateMergeReason = randomFrom(MAPPING_AUTO_UPDATE, MAPPING_AUTO_UPDATE_PREFLIGHT); + ObjectMapper merged = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, autoUpdateMergeReason, Long.MAX_VALUE)); + 
FieldMapper httpStatusCode = (FieldMapper) merged.getMapper("http.status_code"); + assertThat(httpStatusCode, is(instanceOf(KeywordFieldMapper.class))); + + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, MAPPING_UPDATE, Long.MAX_VALUE)) + ); + assertThat(e.getMessage(), equalTo("mapper [http.status_code] cannot be changed from type [keyword] to [long]")); + } + private static RootObjectMapper createRootSubobjectFalseLeafWithDots() { FieldMapper.Builder fieldBuilder = new KeywordFieldMapper.Builder("host.name", IndexVersion.current()); FieldMapper fieldMapper = fieldBuilder.build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java index c29d8335bbcd6..a4a4cc77bab8a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java @@ -236,15 +236,19 @@ public long cost() { } public static BytesRef mockEncodeDenseVector(float[] values, ElementType elementType, IndexVersion indexVersion) { + int dims = values.length; + if (elementType == ElementType.BIT) { + dims *= Byte.SIZE; + } int numBytes = indexVersion.onOrAfter(DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION) - ? elementType.getNumBytes(values.length) + DenseVectorFieldMapper.MAGNITUDE_BYTES - : elementType.getNumBytes(values.length); + ? 
elementType.getNumBytes(dims) + DenseVectorFieldMapper.MAGNITUDE_BYTES + : elementType.getNumBytes(dims); double dotProduct = 0f; ByteBuffer byteBuffer = elementType.createByteBuffer(indexVersion, numBytes); for (float value : values) { if (elementType == ElementType.FLOAT) { byteBuffer.putFloat(value); - } else if (elementType == ElementType.BYTE) { + } else if (elementType == ElementType.BYTE || elementType == ElementType.BIT) { byteBuffer.put((byte) value); } else { throw new IllegalStateException("unknown element_type [" + elementType + "]"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java index 6433cf2f1c0d4..9e819f38eae6e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.DenseVectorFieldType; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.VectorSimilarity; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.vectors.DenseVectorQuery; import org.elasticsearch.search.vectors.VectorData; @@ -134,9 +135,9 @@ public void testFielddataBuilder() { public void testDocValueFormat() { DenseVectorFieldType fft = createFloatFieldType(); - expectThrows(IllegalArgumentException.class, () -> fft.docValueFormat(null, null)); + assertEquals(DocValueFormat.DENSE_VECTOR, fft.docValueFormat(null, null)); DenseVectorFieldType bft = createByteFieldType(); - expectThrows(IllegalArgumentException.class, () -> bft.docValueFormat(null, null)); + assertEquals(DocValueFormat.DENSE_VECTOR, bft.docValueFormat(null, null)); } public void testFetchSourceValue() throws IOException { diff --git 
a/server/src/test/java/org/elasticsearch/index/query/AbstractQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/AbstractQueryBuilderTests.java index 07c8166741e63..72fe6aaa7154a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/AbstractQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/AbstractQueryBuilderTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; @@ -93,4 +94,25 @@ public void testMaybeConvertToBytesRefLongTerm() { assertThat(e.getMessage(), containsString("term starting with [aaaaa")); } + public void testMaybeConvertToBytesRefStringCorrectSize() { + int capacity = randomIntBetween(20, 40); + StringBuilder termBuilder = new StringBuilder(capacity); + int correctSize = 0; + for (int i = 0; i < capacity; i++) { + if (i < capacity / 3) { + termBuilder.append((char) randomIntBetween(0, 127)); + ++correctSize; // use only one byte for char < 128 + } else if (i < 2 * capacity / 3) { + termBuilder.append((char) randomIntBetween(128, 2047)); + correctSize += 2; // use two bytes for char < 2048 + } else { + termBuilder.append((char) randomIntBetween(2048, 4092)); + correctSize += 3; // use three bytes for char >= 2048 + } + } + BytesRef bytesRef = (BytesRef) AbstractQueryBuilder.maybeConvertToBytesRef(termBuilder.toString()); + assertEquals(correctSize, bytesRef.bytes.length); + assertEquals(correctSize, bytesRef.length); + } + } diff --git a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java index 28976d803ff53..7956d67c83c3b 100644 --- a/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java +++ 
b/server/src/test/java/org/elasticsearch/monitor/jvm/JvmStatsTests.java @@ -53,7 +53,7 @@ public void testJvmStats() { assertThat(memoryPools, hasKey("Metaspace")); assertThat(memoryPools.keySet(), hasSize(greaterThan(3))); for (JvmStats.MemoryPool memoryPool : memoryPools.values()) { - assertThat(memoryPool.getUsed().getBytes(), greaterThan(0L)); + assertThat("Memory pool: " + memoryPool.getName(), memoryPool.getUsed().getBytes(), greaterThanOrEqualTo(0L)); } // Threads diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index f67d7ddcc7550..aa76245c20679 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -9,6 +9,9 @@ package org.elasticsearch.reservedstate.service; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -22,10 +25,10 @@ import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.file.AbstractFileWatchingService; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; @@ -41,10 +44,14 @@ import org.mockito.stubbing.Answer; import java.io.IOException; -import java.io.UncheckedIOException; import 
java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.attribute.FileTime; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.List; import java.util.Map; import java.util.Set; @@ -54,7 +61,6 @@ import java.util.function.Consumer; import static java.nio.file.StandardCopyOption.ATOMIC_MOVE; -import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.hasEntry; @@ -67,6 +73,7 @@ import static org.mockito.Mockito.verify; public class FileSettingsServiceTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(FileSettingsServiceTests.class); private Environment env; private ClusterService clusterService; private ReservedClusterStateService controller; @@ -76,6 +83,8 @@ public class FileSettingsServiceTests extends ESTestCase { @Before public void setUp() throws Exception { super.setUp(); + // TODO remove me once https://github.com/elastic/elasticsearch/issues/115280 is closed + Loggers.setLevel(LogManager.getLogger(AbstractFileWatchingService.class), Level.DEBUG); threadpool = new TestThreadPool("file_settings_service_tests"); @@ -120,16 +129,23 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { - if (fileSettingsService.lifecycleState() == Lifecycle.State.STARTED) { - fileSettingsService.stop(); - } - if (fileSettingsService.lifecycleState() == Lifecycle.State.STOPPED) { - fileSettingsService.close(); - } + try { + if (fileSettingsService.lifecycleState() == Lifecycle.State.STARTED) { + logger.info("Stopping file settings service"); + fileSettingsService.stop(); + } + if (fileSettingsService.lifecycleState() == Lifecycle.State.STOPPED) { + logger.info("Closing file settings service"); + 
fileSettingsService.close(); + } - super.tearDown(); - clusterService.close(); - threadpool.shutdownNow(); + super.tearDown(); + clusterService.close(); + threadpool.shutdownNow(); + } finally { + // TODO remove me once https://github.com/elastic/elasticsearch/issues/115280 is closed + Loggers.setLevel(LogManager.getLogger(AbstractFileWatchingService.class), Level.INFO); + } } public void testStartStop() { @@ -195,24 +211,17 @@ public void testInitialFileWorks() throws Exception { return null; }).when(controller).process(any(), any(XContentParser.class), any(), any()); - CountDownLatch fileProcessingLatch = new CountDownLatch(1); + CountDownLatch processFileLatch = new CountDownLatch(1); + fileSettingsService.addFileChangedListener(processFileLatch::countDown); Files.createDirectories(fileSettingsService.watchedFileDir()); // contents of the JSON don't matter, we just need a file to exist writeTestFile(fileSettingsService.watchedFile(), "{}"); - doAnswer((Answer) invocation -> { - try { - return invocation.callRealMethod(); - } finally { - fileProcessingLatch.countDown(); - } - }).when(fileSettingsService).processFileOnServiceStart(); - fileSettingsService.start(); fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - longAwait(fileProcessingLatch); + longAwait(processFileLatch); verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); @@ -225,23 +234,8 @@ public void testProcessFileChanges() throws Exception { return null; }).when(controller).process(any(), any(XContentParser.class), any(), any()); - CountDownLatch changesOnStartLatch = new CountDownLatch(1); - doAnswer((Answer) invocation -> { - try { - return invocation.callRealMethod(); - } finally { - changesOnStartLatch.countDown(); - } - }).when(fileSettingsService).processFileOnServiceStart(); - - 
CountDownLatch changesLatch = new CountDownLatch(1); - doAnswer((Answer) invocation -> { - try { - return invocation.callRealMethod(); - } finally { - changesLatch.countDown(); - } - }).when(fileSettingsService).processFileChanges(); + CountDownLatch processFileCreationLatch = new CountDownLatch(1); + fileSettingsService.addFileChangedListener(processFileCreationLatch::countDown); Files.createDirectories(fileSettingsService.watchedFileDir()); // contents of the JSON don't matter, we just need a file to exist @@ -250,14 +244,19 @@ public void testProcessFileChanges() throws Exception { fileSettingsService.start(); fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - longAwait(changesOnStartLatch); + longAwait(processFileCreationLatch); + + CountDownLatch processFileChangeLatch = new CountDownLatch(1); + fileSettingsService.addFileChangedListener(processFileChangeLatch::countDown); verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); - // second file change; contents still don't matter - writeTestFile(fileSettingsService.watchedFile(), "[]"); - longAwait(changesLatch); + // Touch the file to get an update + Instant now = LocalDateTime.now(ZoneId.systemDefault()).toInstant(ZoneOffset.ofHours(0)); + Files.setLastModifiedTime(fileSettingsService.watchedFile(), FileTime.from(now)); + + longAwait(processFileChangeLatch); verify(fileSettingsService, times(1)).processFileChanges(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); @@ -352,22 +351,15 @@ public void testHandleSnapshotRestoreResetsMetadata() throws Exception { } // helpers - private static void writeTestFile(Path path, String contents) { - Path tempFile = null; + private static void writeTestFile(Path path, String 
contents) throws IOException { + logger.info("Writing settings file under [{}]", path.toAbsolutePath()); + Path tempFilePath = createTempFile(); + Files.writeString(tempFilePath, contents); try { - tempFile = Files.createTempFile(path.getParent(), path.getFileName().toString(), "tmp"); - Files.writeString(tempFile, contents); - - try { - Files.move(tempFile, path, REPLACE_EXISTING, ATOMIC_MOVE); - } catch (AtomicMoveNotSupportedException e) { - Files.move(tempFile, path, REPLACE_EXISTING); - } - } catch (final IOException e) { - throw new UncheckedIOException(Strings.format("could not write file [%s]", path.toAbsolutePath()), e); - } finally { - // we are ignoring exceptions here, so we do not need handle whether or not tempFile was initialized nor if the file exists - IOUtils.deleteFilesIgnoringExceptions(tempFile); + Files.move(tempFilePath, path, ATOMIC_MOVE); + } catch (AtomicMoveNotSupportedException e) { + logger.info("Atomic move not available. Falling back on non-atomic move to write [{}]", path.toAbsolutePath()); + Files.move(tempFilePath, path); } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java index 25cfd1e56514c..3b6b280565da5 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java @@ -251,6 +251,7 @@ public void lastItems(List> items, Releasable releasable, Act assertTrue(next.get()); next.set(false); assertFalse(isLast.get()); + assertFalse(r1.hasReferences()); ReleasableBytesReference r2 = new ReleasableBytesReference(new BytesArray("{\"field\":1}"), () -> {}); chunkHandler.handleChunk(channel, r2, false); @@ -258,7 +259,6 @@ public void lastItems(List> items, Releasable releasable, Act assertTrue(next.get()); next.set(false); assertFalse(isLast.get()); - 
assertTrue(r1.hasReferences()); assertTrue(r2.hasReferences()); ReleasableBytesReference r3 = new ReleasableBytesReference(new BytesArray("\n{\"delete\":"), () -> {}); diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java index b6b311c12edd1..6b2178310d17c 100644 --- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java @@ -20,6 +20,8 @@ import org.elasticsearch.script.VectorScoreScriptUtils.L1Norm; import org.elasticsearch.script.VectorScoreScriptUtils.L2Norm; import org.elasticsearch.script.field.vectors.BinaryDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.BitBinaryDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.BitKnnDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.ByteBinaryDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.ByteKnnDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.DenseVectorDocValuesField; @@ -233,6 +235,61 @@ public void testByteVectorClassBindings() throws IOException { } } + public void testBitVectorClassBindingsDotProduct() throws IOException { + String fieldName = "vector"; + int dims = 8; + float[] docVector = new float[] { 124 }; + // 124 in binary is b01111100 + List queryVector = Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4, (byte) 1, (byte) 125, (byte) -12); + List floatQueryVector = Arrays.asList(1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f); + List invalidQueryVector = Arrays.asList((byte) 1, (byte) 1); + String hexidecimalString = HexFormat.of().formatHex(new byte[] { 124 }); + + List fields = List.of( + new BitBinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.BIT, IndexVersion.current()), + 
"test", + ElementType.BIT, + dims + ), + new BitKnnDenseVectorDocValuesField(KnnDenseVectorScriptDocValuesTests.wrapBytes(new float[][] { docVector }), "test", dims) + ); + for (DenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + // Test cosine similarity explicitly, as it must perform special logic on top of the doc values + DotProduct function = new DotProduct(scoreScript, queryVector, fieldName); + assertEquals("dotProduct result is not equal to the expected value!", -12 + 2 + 4 + 1 + 125, function.dotProduct(), 0.001); + + function = new DotProduct(scoreScript, floatQueryVector, fieldName); + assertEquals( + "dotProduct result is not equal to the expected value!", + 0.42f + 0f + 1f - 1f - 0.42f, + function.dotProduct(), + 0.001 + ); + + function = new DotProduct(scoreScript, hexidecimalString, fieldName); + assertEquals("dotProduct result is not equal to the expected value!", Integer.bitCount(124), function.dotProduct(), 0.0); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new DotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString( + "query vector has an incorrect number of dimensions. " + + "Must be [1] for bitwise operations, or [8] for byte wise operations: provided [2]." 
+ ) + ); + } + } + public void testByteVsFloatSimilarity() throws IOException { int dims = 5; float[] docVector = new float[] { 1f, 127f, -128f, 5f, -10f }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 7533028d6ea08..de9025795ce87 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; @@ -29,6 +30,9 @@ import java.util.Iterator; import java.util.List; +import static java.util.Collections.singleton; +import static org.hamcrest.Matchers.equalTo; + public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase { @Override @@ -100,4 +104,26 @@ public void testEmptyValues() throws IOException { assertThat(e.getMessage(), Matchers.equalTo("[values] must not be an empty array: [my_agg]")); } + + public void testInvalidNegativeNumber() throws IOException { + try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) { + iw.addDocument(singleton(new NumericDocValuesField("number", 60))); + iw.addDocument(singleton(new NumericDocValuesField("number", 40))); + iw.addDocument(singleton(new NumericDocValuesField("number", -20))); + iw.addDocument(singleton(new NumericDocValuesField("number", 10))); + iw.commit(); + + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) + 
.field("number") + .method(PercentilesMethod.HDR); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + try (IndexReader reader = iw.getReader()) { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType)) + ); + assertThat(e.getMessage(), equalTo("Negative values are not supported by HDR aggregation")); + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index 2001286f27f0f..b8f4ab100e84c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -164,6 +164,18 @@ public void testHdrThenTdigestSettings() throws Exception { assertThat(e.getMessage(), equalTo("Cannot set [compression] because the method has already been configured for HDRHistogram")); } + public void testInvalidNegativeNumber() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + testCase(new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 60))); + iw.addDocument(singleton(new NumericDocValuesField("number", 40))); + iw.addDocument(singleton(new NumericDocValuesField("number", -20))); + iw.addDocument(singleton(new NumericDocValuesField("number", 10))); + }, hdr -> { fail("Aggregation should have failed due to negative value"); }); + }); + assertThat(e.getMessage(), equalTo("Negative values are not supported by HDR aggregation")); + } + private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", 
NumberFieldMapper.NumberType.LONG); diff --git a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java index 961d5200b6e0d..62cd3bf2f308c 100644 --- a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java @@ -15,13 +15,17 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; +import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.CollectorManager; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FilterLeafCollector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.test.ESTestCase; @@ -121,15 +125,40 @@ public void testHitCountFromWeightDoesNotEarlyTerminate() throws IOException { public void testCollectedHitCount() throws Exception { Query query = new NonCountingTermQuery(new Term("string", "a1")); - int threshold = randomIntBetween(1, 10000); - assumeTrue("bug with single collection & single segment: https://github.com/elastic/elasticsearch/issues/106647", threshold > 1); - // there's one doc matching the query: any totalHitsThreshold greater than or equal to 1 will not cause early termination + int threshold = randomIntBetween(2, 10000); + // there's one doc matching the query: any totalHitsThreshold greater than 1 will not cause early termination CollectorManager collectorManager = 
createCollectorManager(new HitsThresholdChecker(threshold)); Result result = searcher.search(query, collectorManager); assertEquals(1, result.totalHits); assertFalse(result.terminatedAfter); } + public void testThresholdOne() throws Exception { + Query query = new NonCountingTermQuery(new Term("string", "a1")); + Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 0f); + CollectorManager collectorManager = createCollectorManager(new HitsThresholdChecker(1)); + // threshold 1 behaves differently depending on whether there is a single segment (no early termination) or multiple segments. + // With inter-segment concurrency the behaviour is not deterministic and depends on the timing of the different threads. + // Without inter-segment concurrency the behaviour depends on which segment holds the matching document. + // This is because the check for early termination is performed every time a leaf collector is pulled for a segment, as well + // as for every collected doc. + PartialHitCountCollector partialHitCountCollector = collectorManager.newCollector(); + int i = 0; + while (partialHitCountCollector.getTotalHits() == 0 && i < searcher.getLeafContexts().size()) { + LeafReaderContext ctx = searcher.getLeafContexts().get(i++); + LeafCollector leafCollector = partialHitCountCollector.getLeafCollector(ctx); + BulkScorer bulkScorer = weight.bulkScorer(ctx); + bulkScorer.score(leafCollector, ctx.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); + } + assertEquals(1, partialHitCountCollector.getTotalHits()); + assertFalse(partialHitCountCollector.hasEarlyTerminated()); + expectThrows( + CollectionTerminatedException.class, + () -> partialHitCountCollector.getLeafCollector(randomFrom(searcher.getLeafContexts())) + ); + assertTrue(partialHitCountCollector.hasEarlyTerminated()); + } + public void testCollectedHitCountEarlyTerminated() throws Exception { Query query = new NonCountingTermQuery(new Term("string", "foo")); // there's three docs matching the 
query: any totalHitsThreshold lower than 3 will trigger early termination diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java index eddce6aae298a..56d3454aa5544 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java @@ -13,6 +13,7 @@ import com.sun.net.httpserver.HttpHandler; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; @@ -168,7 +169,21 @@ public void handle(final HttpExchange exchange) throws IOException { RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); final var upload = uploads.remove(params.get("uploadId")); if (upload == null) { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + if (Randomness.get().nextBoolean()) { + exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + } else { + byte[] response = (""" + + + NoSuchUpload + No such upload + test-request-id + test-host-id + """).getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/xml"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + } } else { final var blobContents = upload.complete(extractPartEtags(Streams.readFully(exchange.getRequestBody()))); blobs.put(requestComponents.path, blobContents); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 5bfcd54e963b3..1edc800956a67 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -65,6 +65,7 @@ import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -1849,7 +1850,7 @@ public static C copyNamedWriteable( ); } - protected static T copyInstance( + protected static T copyInstance( T original, NamedWriteableRegistry namedWriteableRegistry, Writeable.Writer writer, @@ -1859,9 +1860,20 @@ protected static T copyInstance( try (BytesStreamOutput output = new BytesStreamOutput()) { output.setTransportVersion(version); writer.write(output, original); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { - in.setTransportVersion(version); - return reader.read(in); + if (randomBoolean()) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + in.setTransportVersion(version); + return reader.read(in); + } + } else { + BytesReference bytesReference = output.copyBytes(); + output.reset(); + output.writeBytesReference(bytesReference); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + in.setTransportVersion(version); + DelayableWriteable delayableWriteable = DelayableWriteable.delayed(reader, in); + return delayableWriteable.expand(); + } } } } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 3000819066495..f996db92e57f4 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,7 +19,6 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null), INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null), ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java index f5c8a14c314cc..ba389a6cd9d7c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.analytics.topmetrics; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -68,7 +69,12 @@ static InternalTopMetrics buildEmptyAggregation(String name, List metric public InternalTopMetrics(StreamInput in) throws IOException { super(in); sortOrder = SortOrder.readFromStream(in); - metricNames = in.readStringCollectionAsList(); + final List metricNames = in.readStringCollectionAsList(); + if (in instanceof DelayableWriteable.Deduplicator 
bo) { + this.metricNames = bo.deduplicate(metricNames); + } else { + this.metricNames = metricNames; + } size = in.readVInt(); topMetrics = in.readCollectionAsList(TopMetric::new); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java index 3337b6d239413..69b6e311e23ee 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java @@ -40,11 +40,9 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; -import static java.util.stream.Collectors.toList; import static org.elasticsearch.xpack.analytics.topmetrics.TopMetricsAggregationBuilder.REGISTRY_KEY; /** @@ -148,9 +146,14 @@ public void doClose() { static class Metrics implements BucketedSort.ExtraData, Releasable { private final MetricValues[] values; + private final List names; Metrics(MetricValues[] values) { this.values = values; + names = new ArrayList<>(values.length); + for (MetricValues value : values) { + names.add(value.name); + } } boolean needsScores() { @@ -182,7 +185,7 @@ BucketedSort.ResultBuilder resultBuilder(DocValueF } List names() { - return Arrays.stream(values).map(v -> v.name).collect(toList()); + return names; } @Override diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml index c591668f3549d..64e7c12caeef3 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml @@ -53,6 +53,23 @@ setup: - contains: {index_templates: {name: 
logs-apm.app@template}} - contains: {index_templates: {name: logs-apm.error@template}} +--- +"Test template reinstallation": + - skip: + reason: contains is a newly added assertion + features: contains + - do: + indices.delete_index_template: + name: traces-apm@template + - do: + cluster.health: + wait_for_events: languid + - do: + indices.get_index_template: + name: traces-apm@template + - length: {index_templates: 1} + - contains: {index_templates: {name: traces-apm@template}} + --- "Test traces-apm-* data stream indexing": - skip: diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_rollover.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_rollover.yml new file mode 100644 index 0000000000000..95586cd1fd665 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_rollover.yml @@ -0,0 +1,51 @@ +--- +setup: + - do: + indices.put_index_template: + name: traces-low-prio + body: + data_stream: {} + index_patterns: ["traces-*"] + priority: 1 + +--- +"Test data stream rollover on template installation": + - skip: + awaits_fix: "https://github.com/elastic/elasticsearch/issues/102360" + + # Disable the apm-data plugin and delete the traces-apm@template index + # template so traces-low-prio takes effect. + - do: + cluster.put_settings: + body: + transient: + xpack.apm_data.registry.enabled: false + - do: + indices.delete_index_template: + name: traces-apm@template + - do: + indices.create_data_stream: + name: traces-apm-testing + - do: + indices.get_data_stream: + name: traces-apm-testing + - match: {data_streams.0.template: traces-low-prio} + + # Re-enable the apm-data plugin, after which the traces-apm@template + # index template should be recreated and trigger a lazy rollover on + # the traces-apm-testing data stream. 
+ - do: + cluster.put_settings: + body: + transient: + xpack.apm_data.registry.enabled: true + - do: + cluster.health: + wait_for_events: languid + - do: + indices.get_data_stream: + name: traces-apm-testing + - length: {data_streams: 1} + - match: {data_streams.0.template: traces-apm@template} + - match: {data_streams.0.rollover_on_write: true} + diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/AutoscalingStorageIntegTestCase.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/AutoscalingStorageIntegTestCase.java index 01b78bb0063c1..83bd9399274d6 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/AutoscalingStorageIntegTestCase.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/AutoscalingStorageIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.DiskUsageIntegTestCase; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; -import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.plugins.Plugin; @@ -40,8 +39,7 @@ protected Settings nodeSettings(final int nodeOrdinal, final Settings otherSetti builder.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), LOW_WATERMARK_BYTES + "b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), HIGH_WATERMARK_BYTES + "b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "0b") - .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "0ms") - 
.put(DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.getKey(), "true"); + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(), "0ms"); return builder.build(); } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index cf6a8f51d1b81..8b920ac11cee7 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -82,8 +82,10 @@ tasks.named("precommit").configure { tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") - task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.") + task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") + task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry) snapshot version", "The number of functions is constantly increasing") + task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version", "The number of functions is constantly increasing") task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.") task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.") }) diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java index 819b0e01ac4de..442ad9a68dfc4 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java +++ 
b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/template/RolloverEnabledTestTemplateRegistry.java @@ -53,7 +53,7 @@ protected Map getComposableTemplateConfigs() { } @Override - protected boolean applyRolloverAfterTemplateV2Upgrade() { + protected boolean applyRolloverAfterTemplateV2Update() { return true; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index a7cf878511d78..b885a90c30e57 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -21,13 +21,15 @@ */ public class XPackFeatures implements FeatureSpecification { public static final NodeFeature LOGSDB_TELEMETRY = new NodeFeature("logsdb_telemetry"); + public static final NodeFeature LOGSDB_TELMETRY_STATS = new NodeFeature("logsdb_telemetry_stats"); @Override public Set getFeatures() { return Set.of( NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE, // Added in 8.12 License.INDEPENDENT_TRIAL_VERSION_FEATURE, // 8.14.0 - LOGSDB_TELEMETRY + LOGSDB_TELEMETRY, + LOGSDB_TELMETRY_STATS ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java index a3473bf6224a1..2758ef73a98da 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java @@ -20,11 +20,20 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage { private final int indicesCount; private final int indicesWithSyntheticSource; + private final long numDocs; + private final long sizeInBytes; public 
LogsDBFeatureSetUsage(StreamInput input) throws IOException { super(input); indicesCount = input.readVInt(); indicesWithSyntheticSource = input.readVInt(); + if (input.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) { + numDocs = input.readVLong(); + sizeInBytes = input.readVLong(); + } else { + numDocs = 0; + sizeInBytes = 0; + } } @Override @@ -32,12 +41,25 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(indicesCount); out.writeVInt(indicesWithSyntheticSource); + if (out.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) { + out.writeVLong(numDocs); + out.writeVLong(sizeInBytes); + } } - public LogsDBFeatureSetUsage(boolean available, boolean enabled, int indicesCount, int indicesWithSyntheticSource) { + public LogsDBFeatureSetUsage( + boolean available, + boolean enabled, + int indicesCount, + int indicesWithSyntheticSource, + long numDocs, + long sizeInBytes + ) { super(XPackField.LOGSDB, available, enabled); this.indicesCount = indicesCount; this.indicesWithSyntheticSource = indicesWithSyntheticSource; + this.numDocs = numDocs; + this.sizeInBytes = sizeInBytes; } @Override @@ -50,11 +72,13 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx super.innerXContent(builder, params); builder.field("indices_count", indicesCount); builder.field("indices_with_synthetic_source", indicesWithSyntheticSource); + builder.field("num_docs", numDocs); + builder.field("size_in_bytes", sizeInBytes); } @Override public int hashCode() { - return Objects.hash(available, enabled, indicesCount, indicesWithSyntheticSource); + return Objects.hash(available, enabled, indicesCount, indicesWithSyntheticSource, numDocs, sizeInBytes); } @Override @@ -69,6 +93,8 @@ public boolean equals(Object obj) { return Objects.equals(available, other.available) && Objects.equals(enabled, other.enabled) && Objects.equals(indicesCount, other.indicesCount) - && 
Objects.equals(indicesWithSyntheticSource, other.indicesWithSyntheticSource); + && Objects.equals(indicesWithSyntheticSource, other.indicesWithSyntheticSource) + && Objects.equals(numDocs, other.numDocs) + && Objects.equals(sizeInBytes, other.sizeInBytes); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/ChunkingSettingsFeatureFlag.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/ChunkingSettingsFeatureFlag.java deleted file mode 100644 index fae69058df565..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/ChunkingSettingsFeatureFlag.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * chunking_settings feature flag. When the feature is complete, this flag will be removed. 
- */ -public class ChunkingSettingsFeatureFlag { - - private ChunkingSettingsFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_chunking_settings"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java index 1b365bd96d834..409dde2d06d92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java @@ -298,10 +298,8 @@ public static Tuple getMinMaxMlConfigVersion(D public static MlConfigVersion getMlConfigVersionForNode(DiscoveryNode node) { String mlConfigVerStr = node.getAttributes().get(ML_CONFIG_VERSION_NODE_ATTR); - if (mlConfigVerStr != null) { - return fromString(mlConfigVerStr); - } - return fromId(node.getPre811VersionId().orElseThrow(() -> new IllegalStateException("getting legacy version id not possible"))); + if (mlConfigVerStr == null) throw new IllegalStateException(ML_CONFIG_VERSION_NODE_ATTR + " not present on node"); + return fromString(mlConfigVerStr); } // Parse an MlConfigVersion from a string. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index 8849377e6ad7e..05f4e560b73c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -401,7 +401,7 @@ private void addComposableTemplatesIfMissing(ClusterState state) { } } else if (Objects.isNull(currentTemplate)) { logger.debug("adding composable template [{}] for [{}], because it doesn't exist", templateName, getOrigin()); - putComposableTemplate(state, templateName, newTemplate.getValue(), creationCheck, false); + putComposableTemplate(state, templateName, newTemplate.getValue(), creationCheck); } else if (Objects.isNull(currentTemplate.version()) || newTemplate.getValue().version() > currentTemplate.version()) { // IndexTemplateConfig now enforces templates contain a `version` property, so if the template doesn't have one we can // safely assume it's an old version of the template. @@ -412,7 +412,7 @@ private void addComposableTemplatesIfMissing(ClusterState state) { currentTemplate.version(), newTemplate.getValue().version() ); - putComposableTemplate(state, templateName, newTemplate.getValue(), creationCheck, true); + putComposableTemplate(state, templateName, newTemplate.getValue(), creationCheck); } else { creationCheck.set(false); logger.trace( @@ -434,11 +434,11 @@ private void addComposableTemplatesIfMissing(ClusterState state) { /** * Returns true if the cluster state contains all of the component templates needed by the composable template. 
If this registry - * requires automatic rollover after index template upgrades (see {@link #applyRolloverAfterTemplateV2Upgrade()}), this method also + * requires automatic rollover after index template upgrades (see {@link #applyRolloverAfterTemplateV2Update()}), this method also * verifies that the installed components templates are of the right version. */ private boolean componentTemplatesInstalled(ClusterState state, ComposableIndexTemplate indexTemplate) { - if (applyRolloverAfterTemplateV2Upgrade() == false) { + if (applyRolloverAfterTemplateV2Update() == false) { // component templates and index templates can be updated independently, we only need to know that the required component // templates are available return state.metadata().componentTemplates().keySet().containsAll(indexTemplate.getRequiredComponentTemplates()); @@ -534,8 +534,7 @@ private void putComposableTemplate( ClusterState state, final String templateName, final ComposableIndexTemplate indexTemplate, - final AtomicBoolean creationCheck, - final boolean isUpgrade + final AtomicBoolean creationCheck ) { final Executor executor = threadPool.generic(); executor.execute(() -> { @@ -550,8 +549,8 @@ private void putComposableTemplate( @Override public void onResponse(AcknowledgedResponse response) { if (response.isAcknowledged()) { - if (isUpgrade && applyRolloverAfterTemplateV2Upgrade()) { - invokeRollover(state, templateName, indexTemplate, creationCheck); + if (applyRolloverAfterTemplateV2Update()) { + invokeRollover(state, templateName, indexTemplate, () -> creationCheck.set((false))); } else { creationCheck.set(false); } @@ -765,12 +764,13 @@ public void onFailure(Exception e) { /** * Allows registries to opt-in for automatic rollover of "relevant" data streams immediately after a composable index template gets - * upgraded. 
If set to {@code true}, then every time a composable index template is being upgraded, all data streams of which name - * matches this template's index patterns AND of all matching templates the upgraded one has the highest priority, will be rolled over. + * updated, including its initial installation. If set to {@code true}, then every time a composable index template is being updated, + * all data streams of which name matches this template's index patterns AND of all matching templates the upgraded one has the highest + * priority, will be rolled over. * * @return {@code true} if this registry wants to apply automatic rollovers after template V2 upgrades */ - protected boolean applyRolloverAfterTemplateV2Upgrade() { + protected boolean applyRolloverAfterTemplateV2Update() { return false; } @@ -784,50 +784,56 @@ protected void onPutPipelineFailure(String pipelineId, Exception e) { logger.error(() -> format("error adding ingest pipeline template [%s] for [%s]", pipelineId, getOrigin()), e); } + /** + * invokeRollover rolls over any data streams matching the index template, + * and then invokes runAfter. 
+ */ private void invokeRollover( final ClusterState state, final String templateName, final ComposableIndexTemplate indexTemplate, - final AtomicBoolean creationCheck + final Runnable runAfter ) { final Executor executor = threadPool.generic(); executor.execute(() -> { List rolloverTargets = findRolloverTargetDataStreams(state, templateName, indexTemplate); - if (rolloverTargets.isEmpty() == false) { - GroupedActionListener groupedActionListener = new GroupedActionListener<>( - rolloverTargets.size(), - new ActionListener<>() { - @Override - public void onResponse(Collection rolloverResponses) { - creationCheck.set(false); - onRolloversBulkResponse(rolloverResponses); - } + if (rolloverTargets.isEmpty()) { + runAfter.run(); + return; + } + GroupedActionListener groupedActionListener = new GroupedActionListener<>( + rolloverTargets.size(), + new ActionListener<>() { + @Override + public void onResponse(Collection rolloverResponses) { + runAfter.run(); + onRolloversBulkResponse(rolloverResponses); + } - @Override - public void onFailure(Exception e) { - creationCheck.set(false); - onRolloverFailure(e); - } + @Override + public void onFailure(Exception e) { + runAfter.run(); + onRolloverFailure(e); } - ); - for (String rolloverTarget : rolloverTargets) { - logger.info( - "rolling over data stream [{}] lazily as a followup to the upgrade of the [{}] index template [{}]", - rolloverTarget, - getOrigin(), - templateName - ); - RolloverRequest request = new RolloverRequest(rolloverTarget, null); - request.lazy(true); - request.masterNodeTimeout(TimeValue.MAX_VALUE); - executeAsyncWithOrigin( - client.threadPool().getThreadContext(), - getOrigin(), - request, - groupedActionListener, - (req, listener) -> client.execute(RolloverAction.INSTANCE, req, listener) - ); } + ); + for (String rolloverTarget : rolloverTargets) { + logger.info( + "rolling over data stream [{}] lazily as a followup to the upgrade of the [{}] index template [{}]", + rolloverTarget, + getOrigin(), + 
templateName + ); + RolloverRequest request = new RolloverRequest(rolloverTarget, null); + request.lazy(true); + request.masterNodeTimeout(TimeValue.MAX_VALUE); + executeAsyncWithOrigin( + client.threadPool().getThreadContext(), + getOrigin(), + request, + groupedActionListener, + (req, listener) -> client.execute(RolloverAction.INSTANCE, req, listener) + ); } }); } @@ -867,7 +873,21 @@ static List findRolloverTargetDataStreams(ClusterState state, String tem .stream() // Limit to checking data streams that match any of the index template's index patterns .filter(ds -> indexTemplate.indexPatterns().stream().anyMatch(pattern -> Regex.simpleMatch(pattern, ds.getName()))) - .filter(ds -> templateName.equals(MetadataIndexTemplateService.findV2Template(metadata, ds.getName(), ds.isHidden()))) + .filter(ds -> { + final String dsTemplateName = MetadataIndexTemplateService.findV2Template(metadata, ds.getName(), ds.isHidden()); + if (templateName.equals(dsTemplateName)) { + return true; + } + // findV2Template did not match templateName, which implies one of two things: + // - indexTemplate has a lower priority than the index template matching for ds, OR + // - indexTemplate does not yet exist in cluster state (i.e. because it's in the process of being + // installed or updated) + // + // Because of the second case, we must check if indexTemplate's priority is greater than the matching + // index template, in case it would take precedence after installation/update. 
+ final ComposableIndexTemplate dsTemplate = metadata.templatesV2().get(dsTemplateName); + return dsTemplate == null || indexTemplate.priorityOrZero() > dsTemplate.priorityOrZero(); + }) .map(DataStream::getName) .collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/ResourceUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/ResourceUtils.java index f52013ea5aa89..50f144a0e0899 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/ResourceUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/ResourceUtils.java @@ -35,11 +35,12 @@ static byte[] loadVersionedResourceUTF8( } public static String loadResource(Class clazz, String name) throws IOException { - InputStream is = clazz.getResourceAsStream(name); - if (is == null) { - throw new IOException("Resource [" + name + "] not found in classpath."); + try (InputStream is = clazz.getResourceAsStream(name)) { + if (is == null) { + throw new IOException("Resource [" + name + "] not found in classpath."); + } + return new String(is.readAllBytes(), java.nio.charset.StandardCharsets.UTF_8); } - return new String(is.readAllBytes(), java.nio.charset.StandardCharsets.UTF_8); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java index 183251f39a029..c8ddd46c5912f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java @@ -227,7 +227,7 @@ private IngestPipelineConfig loadIngestPipeline(String name, int version, @Nulla } @Override - protected boolean applyRolloverAfterTemplateV2Upgrade() { + protected boolean applyRolloverAfterTemplateV2Update() { return true; } } 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java index ef65f4bca1c35..a1cc978af20e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java @@ -331,10 +331,8 @@ public static Tuple getMinMaxTra public static TransformConfigVersion getTransformConfigVersionForNode(DiscoveryNode node) { String transformConfigVerStr = node.getAttributes().get(TRANSFORM_CONFIG_VERSION_NODE_ATTR); - if (transformConfigVerStr != null) { - return fromString(transformConfigVerStr); - } - return fromId(node.getPre811VersionId().orElseThrow(() -> new IllegalStateException("getting legacy version id not possible"))); + if (transformConfigVerStr == null) throw new IllegalStateException(TRANSFORM_CONFIG_VERSION_NODE_ATTR + " not present on node"); + return fromString(transformConfigVerStr); } // Parse an TransformConfigVersion from a string. 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java index 34428c303a076..4a3fd101b507d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java @@ -196,15 +196,6 @@ public void testGetMinMaxMlConfigVersionWhenMlConfigVersionAttrIsMissing() { } public void testGetMlConfigVersionForNode() { - DiscoveryNode node = DiscoveryNodeUtils.builder("_node_id4") - .name("_node_name4") - .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9303)) - .roles(ROLES_WITH_ML) - .version(VersionInformation.inferVersions(Version.fromString("8.7.0"))) - .build(); - MlConfigVersion mlConfigVersion = MlConfigVersion.getMlConfigVersionForNode(node); - assertEquals(MlConfigVersion.V_8_7_0, mlConfigVersion); - DiscoveryNode node1 = DiscoveryNodeUtils.builder("_node_id5") .name("_node_name5") .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9304)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java index 891430057513e..21fadee4e78a8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java @@ -207,7 +207,7 @@ private void validateScheduledEventSuccessfulBuild( String description = randomAlphaOfLength(10); String calendarId = randomAlphaOfLength(10); Instant startTime = Instant.ofEpochMilli(Instant.now().toEpochMilli()); - Instant endTime = startTime.plusSeconds(randomInt(3600)); + Instant endTime = startTime.plusSeconds(randomIntBetween(1, 3600)); 
ScheduledEvent.Builder builder = new ScheduledEvent.Builder().description(description) .calendarId(calendarId) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java index b0127c0005323..e396712cbc360 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java @@ -435,7 +435,7 @@ public void testAutomaticRollover() throws Exception { assertThat(suppressed[0].getMessage(), startsWith("Failed to rollover logs-my_app-")); } - public void testNoRolloverForFreshInstalledIndexTemplate() throws Exception { + public void testRolloverForFreshInstalledIndexTemplate() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); @@ -473,9 +473,9 @@ public void testNoRolloverForFreshInstalledIndexTemplate() throws Exception { registry.setApplyRollover(true); registry.clusterChanged(event); assertBusy(() -> assertThat(putIndexTemplateCounter.get(), equalTo(1))); - // the index component is first installed, not upgraded, therefore rollover should not be triggered - Thread.sleep(100L); - assertThat(rolloverCounter.get(), equalTo(0)); + // rollover should be triggered even for the first installation, since the template + // may now take precedence over a data stream's existing index template + assertBusy(() -> assertThat(rolloverCounter.get(), equalTo(2))); } public void testThatTemplatesAreNotUpgradedWhenNotNeeded() throws Exception { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java index 349fdfe1259c9..2ef0c7f5301ec 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java @@ -118,7 +118,7 @@ public void setPolicyUpgradeRequired(boolean policyUpgradeRequired) { } @Override - protected boolean applyRolloverAfterTemplateV2Upgrade() { + protected boolean applyRolloverAfterTemplateV2Update() { return applyRollover.get(); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java index b42056372b1ab..acb89298ed568 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java @@ -155,15 +155,6 @@ public void testGetMinMaxTransformConfigVersion() { } public void testGetTransformConfigVersionForNode() { - DiscoveryNode node = DiscoveryNodeUtils.builder("_node_id4") - .name("_node_name4") - .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9303)) - .roles(ROLES_WITH_TRANSFORM) - .version(VersionInformation.inferVersions(Version.fromString("8.7.0"))) - .build(); - TransformConfigVersion transformConfigVersion = TransformConfigVersion.getTransformConfigVersionForNode(node); - assertEquals(TransformConfigVersion.V_8_7_0, transformConfigVersion); - DiscoveryNode node1 = DiscoveryNodeUtils.builder("_node_id5") .name("_node_name5") .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9304)) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java 
b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 4329cc65f262f..2f875cc1a3fa9 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -42,7 +42,6 @@ private DeprecationChecks() {} NodeDeprecationChecks::checkDataPathsList, NodeDeprecationChecks::checkSharedDataPathSetting, NodeDeprecationChecks::checkReservedPrefixedRealmNames, - NodeDeprecationChecks::checkSingleDataNodeWatermarkSetting, NodeDeprecationChecks::checkExporterUseIngestPipelineSettings, NodeDeprecationChecks::checkExporterPipelineMasterTimeoutSetting, NodeDeprecationChecks::checkExporterCreateLegacyTemplateSetting, diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index b265dd5e44710..349762456cd3b 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.DataTier; -import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -216,28 +215,6 @@ static DeprecationIssue checkReservedPrefixedRealmNames( } } - static DeprecationIssue checkSingleDataNodeWatermarkSetting( - final Settings settings, - final PluginsAndModules pluginsAndModules, - final ClusterState clusterState, - final 
XPackLicenseState licenseState - ) { - if (DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.exists(settings)) { - String key = DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.getKey(); - return new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - String.format(Locale.ROOT, "setting [%s] is deprecated and will not be available in a future version", key), - "https://www.elastic.co/guide/en/elasticsearch/reference/7.14/" - + "breaking-changes-7.14.html#deprecate-single-data-node-watermark", - String.format(Locale.ROOT, "found [%s] configured. Discontinue use of this setting.", key), - false, - null - ); - } - - return null; - } - private static DeprecationIssue deprecatedAffixSetting( Setting.AffixSetting deprecatedAffixSetting, String detailPattern, diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 282fa6c8960cd..9d89a3cbe328e 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.allocation.DataTier; -import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; @@ -211,33 +210,6 @@ public void testCheckReservedPrefixedRealmNames() { ); } - public void testSingleDataNodeWatermarkSetting() { - Settings settings = Settings.builder().put(DiskThresholdDecider.ENABLE_FOR_SINGLE_DATA_NODE.getKey(), true).build(); - - List issues = DeprecationChecks.filterChecks( - 
NODE_SETTINGS_CHECKS, - c -> c.apply(settings, null, ClusterState.EMPTY_STATE, new XPackLicenseState(() -> 0)) - ); - - final String expectedUrl = "https://www.elastic.co/guide/en/elasticsearch/reference/7.14/" - + "breaking-changes-7.14.html#deprecate-single-data-node-watermark"; - assertThat( - issues, - hasItem( - new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - "setting [cluster.routing.allocation.disk.watermark.enable_for_single_data_node] is deprecated and" - + " will not be available in a future version", - expectedUrl, - "found [cluster.routing.allocation.disk.watermark.enable_for_single_data_node] configured." - + " Discontinue use of this setting.", - false, - null - ) - ) - ); - } - void monitoringSetting(String settingKey, String value) { Settings settings = Settings.builder().put(settingKey, value).build(); List issues = DeprecationChecks.filterChecks( diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml index 81eaa24fd6f5d..8271b17626600 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml @@ -27,6 +27,7 @@ teardown: --- "xpack usage includes Enterprise Search": + - do: xpack.usage: { } @@ -144,7 +145,7 @@ teardown: - 'id1' - 'id2' - rule_id: query-rule-id2 - type: pinned + type: exclude criteria: - type: exact metadata: query_string @@ -193,15 +194,15 @@ teardown: - do: xpack.usage: { } - - match: { - enterprise_search: { - enabled: true, - available: true, - search_applications: { count: 1 }, - analytics_collections: { count: 0 }, - query_rulesets: { total_count: 2, total_rule_count: 5, min_rule_count: 2, max_rule_count: 3, rule_criteria_total_counts: { exact: 5 } } - } - } + - match: { 
"enterprise_search.enabled": true } + - match: { "enterprise_search.available": true } + - match: { "enterprise_search.search_applications.count": 1 } + - match: { "enterprise_search.analytics_collections.count": 0 } + - match: { "enterprise_search.query_rulesets.total_count": 2 } + - match: { "enterprise_search.query_rulesets.total_rule_count": 5 } + - match: { "enterprise_search.query_rulesets.min_rule_count": 2 } + - match: { "enterprise_search.query_rulesets.max_rule_count": 3 } + - match: { "enterprise_search.query_rulesets.rule_criteria_total_counts.exact": 5 } - do: query_rules.delete_ruleset: @@ -210,14 +211,12 @@ teardown: - do: xpack.usage: { } - - match: { - enterprise_search: { - enabled: true, - available: true, - search_applications: { count: 1 }, - analytics_collections: { count: 0 }, - query_rulesets: { total_count: 1, total_rule_count: 2, min_rule_count: 2, max_rule_count: 2, rule_criteria_total_counts: { exact: 2 } } - } - } - - + - match: { "enterprise_search.enabled": true } + - match: { "enterprise_search.available": true } + - match: { "enterprise_search.search_applications.count": 1 } + - match: { "enterprise_search.analytics_collections.count": 0 } + - match: { "enterprise_search.query_rulesets.total_count": 1 } + - match: { "enterprise_search.query_rulesets.total_rule_count": 2 } + - match: { "enterprise_search.query_rulesets.min_rule_count": 2 } + - match: { "enterprise_search.query_rulesets.max_rule_count": 2 } + - match: { "enterprise_search.query_rulesets.rule_criteria_total_counts.exact": 2 } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index 366630eadb5fe..b6383fac33299 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; @@ -24,7 +23,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; import org.elasticsearch.xpack.esql.core.querydsl.query.BoolQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.ExistsQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; @@ -87,18 +85,6 @@ public static Query doTranslate(StringQueryPredicate q, TranslatorHandler handle } } - public static class Matches extends ExpressionTranslator { - - @Override - protected Query asQuery(MatchQueryPredicate q, TranslatorHandler handler) { - return doTranslate(q, handler); - } - - public static Query doTranslate(MatchQueryPredicate q, TranslatorHandler handler) { - return new MatchQuery(q.source(), handler.nameOf(q.field()), q.query(), q); - } - } - public static class MultiMatches extends ExpressionTranslator { @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQuery.java index 3b7948d37cfad..e6b6dc20c951a 
100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQuery.java @@ -12,7 +12,6 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.Collections; @@ -34,6 +33,7 @@ public class MatchQuery extends Query { entry("analyzer", MatchQueryBuilder::analyzer), entry("auto_generate_synonyms_phrase_query", (qb, s) -> qb.autoGenerateSynonymsPhraseQuery(Booleans.parseBoolean(s))), entry("fuzziness", (qb, s) -> qb.fuzziness(Fuzziness.fromString(s))), + entry("boost", (qb, s) -> qb.boost(Float.parseFloat(s))), entry("fuzzy_transpositions", (qb, s) -> qb.fuzzyTranspositions(Booleans.parseBoolean(s))), entry("fuzzy_rewrite", MatchQueryBuilder::fuzzyRewrite), entry("lenient", (qb, s) -> qb.lenient(Booleans.parseBoolean(s))), @@ -46,19 +46,31 @@ public class MatchQuery extends Query { private final String name; private final Object text; - private final MatchQueryPredicate predicate; + private final Double boost; + private final Fuzziness fuzziness; private final Map options; public MatchQuery(Source source, String name, Object text) { - this(source, name, text, null); + this(source, name, text, Map.of()); } - public MatchQuery(Source source, String name, Object text, MatchQueryPredicate predicate) { + public MatchQuery(Source source, String name, Object text, Map options) { super(source); + assert options != null; this.name = name; this.text = text; - this.predicate = predicate; - this.options = predicate == null ? 
Collections.emptyMap() : predicate.optionMap(); + this.options = options; + this.boost = null; + this.fuzziness = null; + } + + public MatchQuery(Source source, String name, Object text, Double boost, Fuzziness fuzziness) { + super(source); + this.name = name; + this.text = text; + this.options = Collections.emptyMap(); + this.boost = boost; + this.fuzziness = fuzziness; } @Override @@ -71,6 +83,12 @@ public QueryBuilder asBuilder() { throw new IllegalArgumentException("illegal match option [" + k + "]"); } }); + if (boost != null) { + queryBuilder.boost(boost.floatValue()); + } + if (fuzziness != null) { + queryBuilder.fuzziness(fuzziness); + } return queryBuilder; } @@ -82,13 +100,9 @@ public Object text() { return text; } - MatchQueryPredicate predicate() { - return predicate; - } - @Override public int hashCode() { - return Objects.hash(text, name, predicate); + return Objects.hash(text, name, options, boost, fuzziness); } @Override @@ -98,11 +112,27 @@ public boolean equals(Object obj) { } MatchQuery other = (MatchQuery) obj; - return Objects.equals(text, other.text) && Objects.equals(name, other.name) && Objects.equals(predicate, other.predicate); + return Objects.equals(text, other.text) + && Objects.equals(name, other.name) + && Objects.equals(options, other.options) + && Objects.equals(boost, other.boost) + && Objects.equals(fuzziness, other.fuzziness); } @Override protected String innerToString() { return name + ":" + text; } + + public Double boost() { + return boost; + } + + public Fuzziness fuzziness() { + return fuzziness; + } + + public Map options() { + return options; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 81739536c6572..9708a3ea0db85 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -370,10 +370,7 @@ public static boolean isUnsupported(DataType from) { } public static boolean isString(DataType t) { - if (EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG.isEnabled() && t == SEMANTIC_TEXT) { - return true; - } - return t == KEYWORD || t == TEXT; + return t == KEYWORD || t == TEXT || t == SEMANTIC_TEXT; } public static boolean isPrimitiveAndSupported(DataType t) { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java index 47c471af1051c..4316bd21ffe94 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java @@ -37,14 +37,14 @@ public void testEqualsAndHashCode() { } private static MatchQuery copy(MatchQuery query) { - return new MatchQuery(query.source(), query.name(), query.text(), query.predicate()); + return new MatchQuery(query.source(), query.name(), query.text(), query.options()); } private static MatchQuery mutate(MatchQuery query) { List> options = Arrays.asList( - q -> new MatchQuery(SourceTests.mutate(q.source()), q.name(), q.text(), q.predicate()), - q -> new MatchQuery(q.source(), randomValueOtherThan(q.name(), () -> randomAlphaOfLength(5)), q.text(), q.predicate()), - q -> new MatchQuery(q.source(), q.name(), randomValueOtherThan(q.text(), () -> randomAlphaOfLength(5)), q.predicate()) + q -> new MatchQuery(SourceTests.mutate(q.source()), q.name(), q.text(), q.options()), + q -> new MatchQuery(q.source(), randomValueOtherThan(q.name(), () -> randomAlphaOfLength(5)), q.text(), q.options()), + q -> new MatchQuery(q.source(), q.name(), randomValueOtherThan(q.text(), () -> randomAlphaOfLength(5)), 
q.options()) ); // TODO mutate the predicate return randomFrom(options).apply(query); @@ -69,7 +69,7 @@ private static MatchQueryBuilder getBuilder(String options) { final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", KEYWORD, emptyMap(), true)); final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", options); - final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp.optionMap()); return (MatchQueryBuilder) mmq.asBuilder(); } @@ -77,7 +77,7 @@ public void testToString() { final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", KEYWORD, emptyMap(), true)); final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", ""); - final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp.optionMap()); assertEquals("MatchQuery@1:2[eggplant:foo]", mmq.toString()); } } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 150017ce9e955..1cf39f06f77c8 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -1,3 +1,7 @@ +plugins { + id 'idea' +} + import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider; @@ -52,14 +56,19 @@ dependencies { internalClusterTestImplementation project(":modules:mapper-extras") } +def generatedPath = "src/main/generated" def projectDirectory = project.layout.projectDirectory -def generatedSourceDir = projectDirectory.dir("src/main/generated") +def generatedSourceDir = projectDirectory.dir(generatedPath) tasks.named("compileJava").configure { 
options.compilerArgumentProviders.add(new SourceDirectoryCommandLineArgumentProvider(generatedSourceDir)) // IntelliJ sticks generated files here and we can't stop it.... exclude { normalize(it.file.toString()).contains("src/main/generated-src/generated") } } +idea.module { + sourceDirs += file(generatedPath) +} + interface Injected { @Inject FileSystemOperations getFs() } @@ -289,34 +298,6 @@ tasks.named('stringTemplates').configure { var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") - // enrich - File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st") - template { - it.properties = intProperties - it.inputFile = enrichResultBuilderInput - it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java" - } - template { - it.properties = longProperties - it.inputFile = enrichResultBuilderInput - it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java" - } - template { - it.properties = doubleProperties - it.inputFile = enrichResultBuilderInput - it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java" - } - template { - it.properties = bytesRefProperties - it.inputFile = enrichResultBuilderInput - it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java" - } - template { - it.properties = booleanProperties - it.inputFile = enrichResultBuilderInput - it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" - } - File inInputFile = file("src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/X-InEvaluator.java.st") template { diff --git 
a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 49e819b7cdc88..3deac4925c951 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -803,4 +803,32 @@ tasks.named('stringTemplates').configure { it.inputFile = bucketedSortInputFile it.outputFile = "org/elasticsearch/compute/data/sort/DoubleBucketedSort.java" } + + File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/compute/operator/lookup/X-EnrichResultBuilder.java.st") + template { + it.properties = intProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForDouble.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBoolean.java" + } + } diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBoolean.java similarity index 99% rename from x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java rename to 
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBoolean.java index 7978baf0c5f29..0693d7040b4b1 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBoolean.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.ObjectArray; diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBytesRef.java similarity index 99% rename from x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBytesRef.java index 28326568af63b..6c3478dfd9a38 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForBytesRef.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForDouble.java similarity index 99% rename from x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForDouble.java index e15b8f7d6d4b8..615de8cd2da9a 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForDouble.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.ObjectArray; diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForInt.java similarity index 99% rename from x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForInt.java index 223a8eb88f0b1..87ad680bf914d 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForInt.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.ObjectArray; diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForLong.java similarity index 99% rename from x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForLong.java index 674b2e01c5703..46fae69d124d7 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForLong.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.ObjectArray; diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 1739c90467c2c..573d9e048a4d4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -29,6 +29,7 @@ exports org.elasticsearch.compute.operator.exchange; exports org.elasticsearch.compute.aggregation.blockhash; exports org.elasticsearch.compute.aggregation.spatial; + exports org.elasticsearch.compute.operator.lookup; exports org.elasticsearch.compute.operator.topn; exports org.elasticsearch.compute.operator.mvdedupe; exports org.elasticsearch.compute.aggregation.table; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java similarity index 95% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java index f70cfe1dc8a41..2093094fb8af5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -32,8 +32,7 @@ * This operator will emit Pages consisting of a {@link DocVector} and {@link IntBlock} of positions for each query of the input queries. * The position block will be used as keys to combine the extracted values by {@link MergePositionsOperator}. */ -final class EnrichQuerySourceOperator extends SourceOperator { - +public final class EnrichQuerySourceOperator extends SourceOperator { private final BlockFactory blockFactory; private final QueryList queryList; private int queryPosition = -1; @@ -42,9 +41,9 @@ final class EnrichQuerySourceOperator extends SourceOperator { private final int maxPageSize; // using smaller pages enables quick cancellation and reduces sorting costs - static final int DEFAULT_MAX_PAGE_SIZE = 256; + public static final int DEFAULT_MAX_PAGE_SIZE = 256; - EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) { + public EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.queryList = queryList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilder.java similarity index 98% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilder.java index 062abb1917d84..16275a010d793 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilder.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java similarity index 96% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java index 3e1f46100c4f3..d42655446ca10 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperator.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -43,7 +43,7 @@ * | null | null | * | d | 2023 | */ -final class MergePositionsOperator implements Operator { +public final class MergePositionsOperator implements Operator { private boolean finished = false; private final int positionChannel; private final EnrichResultBuilder[] builders; @@ -51,7 +51,7 @@ final class MergePositionsOperator implements Operator { private Page outputPage; - MergePositionsOperator( + public MergePositionsOperator( int positionChannel, int[] mergingChannels, ElementType[] mergingTypes, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java similarity index 56% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java index c86f01b045dad..5428863436535 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java @@ -5,9 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.ShapeRelation; @@ -15,28 +16,24 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.core.Nullable; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.mapper.GeoShapeQueryable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import java.util.ArrayList; import java.util.List; import java.util.function.IntFunction; -import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataType.IP; - /** * Generates a list of Lucene queries based on the input block. */ @@ -61,44 +58,99 @@ int getPositionCount() { abstract Query getQuery(int position); /** - * Returns a list of term queries for the given field and the input block. 
+ * Returns a list of term queries for the given field and the input block + * using only the {@link ElementType} of the {@link Block} to determine the + * query. + */ + public static QueryList rawTermQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block) { + IntFunction blockToJavaObject = switch (block.elementType()) { + case BOOLEAN -> { + BooleanBlock booleanBlock = (BooleanBlock) block; + yield booleanBlock::getBoolean; + } + case BYTES_REF -> offset -> { + BytesRefBlock bytesRefBlock = (BytesRefBlock) block; + return bytesRefBlock.getBytesRef(offset, new BytesRef()); + }; + case DOUBLE -> { + DoubleBlock doubleBlock = ((DoubleBlock) block); + yield doubleBlock::getDouble; + } + case FLOAT -> { + FloatBlock floatBlock = ((FloatBlock) block); + yield floatBlock::getFloat; + } + case LONG -> { + LongBlock intBlock = (LongBlock) block; + yield intBlock::getLong; + } + case INT -> { + IntBlock intBlock = (IntBlock) block; + yield intBlock::getInt; + } + case NULL -> offset -> null; + case DOC -> throw new IllegalArgumentException("can't read values from [doc] block"); + case COMPOSITE -> throw new IllegalArgumentException("can't read values from [composite] block"); + case UNKNOWN -> throw new IllegalArgumentException("can't read values from [" + block + "]"); + }; + return new TermQueryList(field, searchExecutionContext, block, blockToJavaObject); + } + + /** + * Returns a list of term queries for the given field and the input block of + * {@code ip} field values. 
+ */ + public static QueryList ipTermQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + byte[] ipBytes = new byte[InetAddressPoint.BYTES]; + return new TermQueryList(field, searchExecutionContext, block, offset -> { + final var bytes = block.getBytesRef(offset, scratch); + if (ipBytes.length != bytes.length) { + // Lucene only support 16-byte IP addresses, even IPv4 is encoded in 16 bytes + throw new IllegalStateException("Cannot decode IP field from bytes of length " + bytes.length); + } + System.arraycopy(bytes.bytes, bytes.offset, ipBytes, 0, bytes.length); + return InetAddressPoint.decode(ipBytes); + }); + } + + /** + * Returns a list of term queries for the given field and the input block of + * {@code date} field values. */ - static QueryList termQueryList( - MappedFieldType field, - SearchExecutionContext searchExecutionContext, - Block block, - DataType inputDataType - ) { - return new TermQueryList(field, searchExecutionContext, block, inputDataType); + public static QueryList dateTermQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, LongBlock block) { + return new TermQueryList( + field, + searchExecutionContext, + block, + field instanceof RangeFieldMapper.RangeFieldType rangeFieldType + ? offset -> rangeFieldType.dateTimeFormatter().formatMillis(block.getLong(offset)) + : block::getLong + ); } /** * Returns a list of geo_shape queries for the given field and the input block. 
*/ - static QueryList geoShapeQuery( - MappedFieldType field, - SearchExecutionContext searchExecutionContext, - Block block, - DataType inputDataType - ) { - return new GeoShapeQueryList(field, searchExecutionContext, block, inputDataType); + public static QueryList geoShapeQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block) { + return new GeoShapeQueryList(field, searchExecutionContext, block); } private static class TermQueryList extends QueryList { - private final BytesRef scratch = new BytesRef(); - private final byte[] ipBytes = new byte[InetAddressPoint.BYTES]; private final MappedFieldType field; private final SearchExecutionContext searchExecutionContext; - private final DataType inputDataType; private final IntFunction blockValueReader; - private TermQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, DataType inputDataType) { + private TermQueryList( + MappedFieldType field, + SearchExecutionContext searchExecutionContext, + Block block, + IntFunction blockValueReader + ) { super(block); - this.field = field; this.searchExecutionContext = searchExecutionContext; - this.inputDataType = inputDataType; - this.blockValueReader = blockToJavaObject(); + this.blockValueReader = blockValueReader; } @Override @@ -118,53 +170,6 @@ Query getQuery(int position) { } }; } - - private IntFunction blockToJavaObject() { - return switch (block.elementType()) { - case BOOLEAN -> { - BooleanBlock booleanBlock = (BooleanBlock) block; - yield booleanBlock::getBoolean; - } - case BYTES_REF -> { - BytesRefBlock bytesRefBlock = (BytesRefBlock) block; - if (inputDataType == IP) { - yield offset -> { - final var bytes = bytesRefBlock.getBytesRef(offset, scratch); - if (ipBytes.length != bytes.length) { - // Lucene only support 16-byte IP addresses, even IPv4 is encoded in 16 bytes - throw new IllegalStateException("Cannot decode IP field from bytes of length " + bytes.length); - } - 
System.arraycopy(bytes.bytes, bytes.offset, ipBytes, 0, bytes.length); - return InetAddressPoint.decode(ipBytes); - }; - } - yield offset -> bytesRefBlock.getBytesRef(offset, new BytesRef()); - } - case DOUBLE -> { - DoubleBlock doubleBlock = ((DoubleBlock) block); - yield doubleBlock::getDouble; - } - case FLOAT -> { - FloatBlock floatBlock = ((FloatBlock) block); - yield floatBlock::getFloat; - } - case INT -> { - IntBlock intBlock = (IntBlock) block; - yield intBlock::getInt; - } - case LONG -> { - LongBlock longBlock = (LongBlock) block; - if (inputDataType == DATETIME && field instanceof RangeFieldMapper.RangeFieldType rangeFieldType) { - yield offset -> rangeFieldType.dateTimeFormatter().formatMillis(longBlock.getLong(offset)); - } - yield longBlock::getLong; - } - case NULL -> offset -> null; - case DOC -> throw new EsqlIllegalArgumentException("can't read values from [doc] block"); - case COMPOSITE -> throw new EsqlIllegalArgumentException("can't read values from [composite] block"); - case UNKNOWN -> throw new EsqlIllegalArgumentException("can't read values from [" + block + "]"); - }; - } } private static class GeoShapeQueryList extends QueryList { @@ -172,20 +177,13 @@ private static class GeoShapeQueryList extends QueryList { private final MappedFieldType field; private final SearchExecutionContext searchExecutionContext; private final IntFunction blockValueReader; - private final DataType inputDataType; // Currently unused, but might be needed for when input is read as doc-values private final IntFunction shapeQuery; - private GeoShapeQueryList( - MappedFieldType field, - SearchExecutionContext searchExecutionContext, - Block block, - DataType inputDataType - ) { + private GeoShapeQueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block) { super(block); this.field = field; this.searchExecutionContext = searchExecutionContext; - this.inputDataType = inputDataType; this.blockValueReader = blockToGeometry(block); 
this.shapeQuery = shapeQuery(); } @@ -198,7 +196,7 @@ Query getQuery(int position) { case 0 -> null; case 1 -> shapeQuery.apply(first); // TODO: support multiple values - default -> throw new EsqlIllegalArgumentException("can't read multiple Geometry values from a single position"); + default -> throw new IllegalArgumentException("can't read multiple Geometry values from a single position"); }; } @@ -206,14 +204,17 @@ private IntFunction blockToGeometry(Block block) { return switch (block.elementType()) { case LONG -> offset -> { var encoded = ((LongBlock) block).getLong(offset); - return SpatialCoordinateTypes.GEO.longAsPoint(encoded); + return new Point( + GeoEncodingUtils.decodeLongitude((int) encoded), + GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)) + ); }; case BYTES_REF -> offset -> { var wkb = ((BytesRefBlock) block).getBytesRef(offset, scratch); return WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); }; case NULL -> offset -> null; - default -> throw new EsqlIllegalArgumentException("can't read Geometry values from [" + block.elementType() + "] block"); + default -> throw new IllegalArgumentException("can't read Geometry values from [" + block.elementType() + "] block"); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/X-EnrichResultBuilder.java.st similarity index 99% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/X-EnrichResultBuilder.java.st index 7066b8b8f12a5..c0426741d2ea2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/X-EnrichResultBuilder.java.st @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index f1fda67c36dda..4bf9f3942c0a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -53,7 +53,18 @@ public void close() {} @Override protected Operator.OperatorFactory simple() { - return new FilterOperator.FilterOperatorFactory(dvrCtx -> new SameLastDigit(dvrCtx, 0, 1)); + return new FilterOperator.FilterOperatorFactory(new EvalOperator.ExpressionEvaluator.Factory() { + + @Override + public EvalOperator.ExpressionEvaluator get(DriverContext context) { + return new SameLastDigit(context, 0, 1); + } + + @Override + public String toString() { + return "SameLastDigit[lhs=0, rhs=1]"; + } + }); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java similarity index 96% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index 04da5d406fbb9..6daace76dd8b8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -46,7 +46,6 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; @@ -105,7 +104,7 @@ public void testQueries() throws Exception { inputTerms = termBuilder.build(); } MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); - QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); + QueryList queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); @@ -186,7 +185,7 @@ public void testRandomMatchQueries() throws Exception { inputTerms = builder.build(); } MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); - var queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); + var queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); int maxPageSize = between(1, 256); EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader); Map> actualPositions = new HashMap<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilderTests.java similarity index 99% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilderTests.java index 24ca02a9d2e07..a9c59fd77b7e8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichResultBuilderTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -29,7 +29,6 @@ import static org.hamcrest.Matchers.equalTo; public class EnrichResultBuilderTests extends ESTestCase { - public void testBytesRef() { BlockFactory blockFactory = blockFactory(); Map> inputValues = new HashMap<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperatorTests.java similarity index 99% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperatorTests.java index df49fff5191bb..da18823c67afd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/MergePositionsOperatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.enrich; +package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 0e23b29172c32..801e1d12b1d4a 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -91,4 +91,15 @@ protected boolean enableRoundingDoubleValuesOnAsserting() { protected boolean supportsInferenceTestService() { return false; } + + @Override + protected boolean deduplicateExactWarnings() { + /* + * In ESQL's main tests we shouldn't have to deduplicate but in + * serverless, where we reuse this test case exactly with *slightly* + * different configuration, we must deduplicate. So we do it here. + * It's a bit of a loss of precision, but that's ok. 
+ */ + return true; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle index 676729573b69d..aa19371685ce1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -18,8 +18,8 @@ dependencies { } def supportedVersion = bwcVersion -> { - // ESQL requires its own resolve_fields API - return bwcVersion.onOrAfter(Version.fromString("8.16.0")); + // CCS in ES|QL available in 8.13 + return bwcVersion.onOrAfter(Version.fromString("8.13.0")); } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 3e77bee79dd10..62391c8ca001a 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -38,7 +38,6 @@ import java.util.List; import java.util.Locale; import java.util.Optional; -import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -104,7 +103,6 @@ public MultiClusterSpecIT( protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); checkCapabilities(remoteClusterClient(), remoteFeaturesService(), testName, testCase); - assumeTrue("CCS requires its own resolve_fields API", remoteFeaturesService().clusterHasFeature("esql.resolve_fields_api")); assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue( "Test " + testName + " is skipped on " + Clusters.oldVersion(), @@ -231,20 +229,13 @@ static 
CsvSpecReader.CsvTestCase convertToRemoteIndices(CsvSpecReader.CsvTestCas } int offset = testCase.query.length() - query.length(); if (offset != 0) { - final String pattern = "Line (\\d+):(\\d+):"; + final String pattern = "\\b1:(\\d+)\\b"; final Pattern regex = Pattern.compile(pattern); - testCase.adjustExpectedWarnings(warning -> { - Matcher matcher = regex.matcher(warning); - if (matcher.find()) { - int line = Integer.parseInt(matcher.group(1)); - if (line == 1) { - int position = Integer.parseInt(matcher.group(2)); - int newPosition = position + offset; - return warning.replaceFirst(pattern, "Line " + line + ":" + newPosition + ":"); - } - } - return warning; - }); + testCase.adjustExpectedWarnings(warning -> regex.matcher(warning).replaceAll(match -> { + int position = Integer.parseInt(match.group(1)); + int newPosition = position + offset; + return "1:" + newPosition; + })); } return testCase; } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 1f72827057c5b..dbeaed1596eff 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -67,7 +67,6 @@ record Doc(int id, String color, long data) { @Before public void setUpIndices() throws Exception { - assumeTrue("CCS requires its own resolve_fields API", remoteFeaturesService().clusterHasFeature("esql.resolve_fields_api")); final String mapping = """ "properties": { "data": { "type": "long" }, diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java 
index 57f58fc448822..6ebf05755ef5e 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.esql.CsvTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils; @@ -47,7 +48,6 @@ import java.util.Locale; import java.util.Map; import java.util.TreeMap; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -230,7 +230,7 @@ protected final void doTest() throws Throwable { builder.tables(tables()); } - Map answer = runEsql(builder.query(testCase.query), testCase.expectedWarnings(), testCase.expectedWarningsRegex()); + Map answer = runEsql(builder.query(testCase.query), testCase.assertWarnings(deduplicateExactWarnings())); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); @@ -248,16 +248,30 @@ protected final void doTest() throws Throwable { assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger); } - private Map runEsql( - RequestObjectBuilder requestObject, - List expectedWarnings, - List expectedWarningsRegex - ) throws IOException { + /** + * Should warnings be de-duplicated before checking for exact matches. Defaults + * to {@code false}, but in some environments we emit duplicate warnings. We'd prefer + * not to emit duplicate warnings but for now it isn't worth fighting with. So! In + * those environments we override this to deduplicate. + *

+ * Note: This only applies to warnings declared as {@code warning:}. Those + * declared as {@code warningRegex:} are always a list of + * allowed warnings. {@code warningRegex:} matches 0 or more + * warnings. There is no need to deduplicate because there's no expectation + * of an exact match. + *

+ * + */ + protected boolean deduplicateExactWarnings() { + return false; + } + + private Map runEsql(RequestObjectBuilder requestObject, AssertWarnings assertWarnings) throws IOException { if (mode == Mode.ASYNC) { assert supportsAsync(); - return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); + return RestEsqlTestCase.runEsqlAsync(requestObject, assertWarnings); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); + return RestEsqlTestCase.runEsqlSync(requestObject, assertWarnings); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index ef1e77280d0ee..505ab3adc553b 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -31,6 +31,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.AssertWarnings; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.After; @@ -53,7 +54,6 @@ import java.util.Map; import java.util.Set; import java.util.function.IntFunction; -import java.util.regex.Pattern; import static java.util.Collections.emptySet; import static java.util.Map.entry; @@ -83,9 +83,6 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(RestEsqlTestCase.class); - private static final List NO_WARNINGS = List.of(); - private static final List NO_WARNINGS_REGEX = List.of(); - private static final String MAPPING_ALL_TYPES; static { @@ -379,7 +376,7 @@ public void 
testCSVNoHeaderMode() throws IOException { options.addHeader("Content-Type", mediaType); options.addHeader("Accept", "text/csv; header=absent"); request.setOptions(options); - HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); + HttpEntity entity = performRequest(request, new AssertWarnings.NoWarnings()); String actual = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); assertEquals("keyword0,0\r\n", actual); } @@ -430,11 +427,13 @@ public void testOutOfRangeComparisons() throws IOException { for (String truePredicate : trueForSingleValuesPredicates) { String comparison = fieldWithType + truePredicate; var query = requestObjectBuilder().query(format(null, "from {} | where {}", testIndexName(), comparison)); - List expectedWarnings = List.of( - "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. Only first 20 failures recorded.", - "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" + AssertWarnings assertWarnings = new AssertWarnings.ExactStrings( + List.of( + "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" + ) ); - var result = runEsql(query, expectedWarnings, NO_WARNINGS_REGEX, mode); + var result = runEsql(query, assertWarnings, mode); var values = as(result.get("values"), ArrayList.class); assertThat( @@ -504,7 +503,7 @@ public void testInternalRange() throws IOException { for (String p : predicates) { var query = requestObjectBuilder().query(format(null, "from {} | where {}", testIndexName(), p)); - var result = runEsql(query, List.of(), NO_WARNINGS_REGEX, mode); + var result = runEsql(query, new AssertWarnings.NoWarnings(), mode); var values = as(result.get("values"), ArrayList.class); assertThat( format(null, "Comparison [{}] should return all rows with single values.", p), @@ -996,35 +995,26 @@ private static String expectedTextBody(String format, int count, @Nullable Chara } public Map runEsql(RequestObjectBuilder requestObject) throws IOException { - return runEsql(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX, mode); + return runEsql(requestObject, new AssertWarnings.NoWarnings(), mode); } public static Map runEsqlSync(RequestObjectBuilder requestObject) throws IOException { - return runEsqlSync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); + return runEsqlSync(requestObject, new AssertWarnings.NoWarnings()); } public static Map runEsqlAsync(RequestObjectBuilder requestObject) throws IOException { - return runEsqlAsync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); + return runEsqlAsync(requestObject, new AssertWarnings.NoWarnings()); } - static Map runEsql( - RequestObjectBuilder requestObject, - List expectedWarnings, - List expectedWarningsRegex, - Mode mode - ) throws IOException { + static Map runEsql(RequestObjectBuilder requestObject, AssertWarnings assertWarnings, Mode mode) throws IOException { if (mode == ASYNC) { - return runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); + return 
runEsqlAsync(requestObject, assertWarnings); } else { - return runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); + return runEsqlSync(requestObject, assertWarnings); } } - public static Map runEsqlSync( - RequestObjectBuilder requestObject, - List expectedWarnings, - List expectedWarningsRegex - ) throws IOException { + public static Map runEsqlSync(RequestObjectBuilder requestObject, AssertWarnings assertWarnings) throws IOException { requestObject.build(); Request request = prepareRequest(SYNC); String mediaType = attachBody(requestObject, request); @@ -1040,15 +1030,11 @@ public static Map runEsqlSync( } request.setOptions(options); - HttpEntity entity = performRequest(request, expectedWarnings, expectedWarningsRegex); + HttpEntity entity = performRequest(request, assertWarnings); return entityToMap(entity, requestObject.contentType()); } - public static Map runEsqlAsync( - RequestObjectBuilder requestObject, - List expectedWarnings, - List expectedWarningsRegex - ) throws IOException { + public static Map runEsqlAsync(RequestObjectBuilder requestObject, AssertWarnings assertWarnings) throws IOException { addAsyncParameters(requestObject); requestObject.build(); Request request = prepareRequest(ASYNC); @@ -1088,7 +1074,7 @@ public static Map runEsqlAsync( assertThat(response.getHeader("X-Elasticsearch-Async-Id"), nullValue()); assertThat(response.getHeader("X-Elasticsearch-Async-Is-Running"), is("?0")); } - assertWarnings(response, expectedWarnings, expectedWarningsRegex); + assertWarnings(response, assertWarnings); json.remove("is_running"); // remove this to not mess up later map assertions return Collections.unmodifiableMap(json); } else { @@ -1098,7 +1084,7 @@ public static Map runEsqlAsync( if (isRunning == false) { // must have completed immediately so keep_on_completion must be true assertThat(requestObject.keepOnCompletion(), is(true)); - assertWarnings(response, expectedWarnings, expectedWarningsRegex); + assertWarnings(response, 
assertWarnings); // we already have the results, but let's remember them so that we can compare to async get initialColumns = json.get("columns"); initialValues = json.get("values"); @@ -1128,7 +1114,7 @@ public static Map runEsqlAsync( assertEquals(initialValues, result.get("values")); } - assertWarnings(response, expectedWarnings, expectedWarningsRegex); + assertWarnings(response, assertWarnings); assertDeletable(id); return removeAsyncProperties(result); } @@ -1202,7 +1188,7 @@ static String runEsqlAsTextWithFormat(RequestObjectBuilder builder, String forma } request.setOptions(options); - HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); + HttpEntity entity = performRequest(request, new AssertWarnings.NoWarnings()); return Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); } @@ -1235,9 +1221,8 @@ private static String attachBody(RequestObjectBuilder requestObject, Request req return mediaType; } - private static HttpEntity performRequest(Request request, List allowedWarnings, List allowedWarningsRegex) - throws IOException { - return assertWarnings(performRequest(request), allowedWarnings, allowedWarningsRegex); + private static HttpEntity performRequest(Request request, AssertWarnings assertWarnings) throws IOException { + return assertWarnings(performRequest(request), assertWarnings); } private static Response performRequest(Request request) throws IOException { @@ -1250,13 +1235,13 @@ private static Response performRequest(Request request) throws IOException { return response; } - private static HttpEntity assertWarnings(Response response, List allowedWarnings, List allowedWarningsRegex) { + private static HttpEntity assertWarnings(Response response, AssertWarnings assertWarnings) { List warnings = new ArrayList<>(response.getWarnings()); warnings.removeAll(mutedWarnings()); if (shouldLog()) { LOGGER.info("RESPONSE warnings (after muted)={}", warnings); } - 
EsqlTestUtils.assertWarnings(warnings, allowedWarnings, allowedWarningsRegex); + assertWarnings.assertWarnings(warnings); return response.getEntity(); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/AssertWarnings.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/AssertWarnings.java new file mode 100644 index 0000000000000..f606d36ee6b6c --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/AssertWarnings.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql; + +import java.util.List; +import java.util.regex.Pattern; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.junit.Assert.assertTrue; + +/** + * How should we assert the warnings returned by ESQL. 
+ */ +public interface AssertWarnings { + void assertWarnings(List warnings); + + record NoWarnings() implements AssertWarnings { + @Override + public void assertWarnings(List warnings) { + assertMap(warnings.stream().sorted().toList(), matchesList()); + } + } + + record ExactStrings(List expected) implements AssertWarnings { + @Override + public void assertWarnings(List warnings) { + assertMap(warnings.stream().sorted().toList(), matchesList(expected.stream().sorted().toList())); + } + } + + record DeduplicatedStrings(List expected) implements AssertWarnings { + @Override + public void assertWarnings(List warnings) { + assertMap(warnings.stream().sorted().distinct().toList(), matchesList(expected.stream().sorted().toList())); + } + } + + record AllowedRegexes(List expected) implements AssertWarnings { + @Override + public void assertWarnings(List warnings) { + for (String warning : warnings) { + assertTrue("Unexpected warning: " + warning, expected.stream().anyMatch(x -> x.matcher(warning).matches())); + } + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvSpecReader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvSpecReader.java index 781ae5531c6f0..84e06e0c1b674 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvSpecReader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvSpecReader.java @@ -142,6 +142,26 @@ public void adjustExpectedWarnings(Function updater) { public List expectedWarningsRegex() { return expectedWarningsRegex; } + + /** + * How should we assert the warnings returned by ESQL. + * @param deduplicateExact Should tests configured with {@code warnings:} deduplicate + * the warnings before asserting? Normally don't do it because + * duplicate warnings are lame. We'd like to fix them all. 
But + * in multi-node and multi-shard tests we can emit duplicate + * warnings and it isn't worth fixing them now. + */ + public AssertWarnings assertWarnings(boolean deduplicateExact) { + if (expectedWarnings.isEmpty() == false) { + return deduplicateExact + ? new AssertWarnings.DeduplicatedStrings(expectedWarnings) + : new AssertWarnings.ExactStrings(expectedWarnings); + } + if (expectedWarningsRegex.isEmpty() == false) { + return new AssertWarnings.AllowedRegexes(expectedWarningsRegex); + } + return new AssertWarnings.NoWarnings(); + } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index e755ddb4d0d10..bc465e7e9b64c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -97,7 +97,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.jar.JarInputStream; -import java.util.regex.Pattern; import java.util.zip.ZipEntry; import static java.util.Collections.emptyList; @@ -118,8 +117,6 @@ import static org.elasticsearch.test.ESTestCase.randomMillisUpToYear9999; import static org.elasticsearch.test.ESTestCase.randomShort; import static org.elasticsearch.test.ESTestCase.randomZone; -import static org.elasticsearch.test.ListMatcher.matchesList; -import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; @@ -129,7 +126,6 @@ import static org.elasticsearch.xpack.esql.parser.ParserUtils.ParamClassification.PATTERN; import static org.elasticsearch.xpack.esql.parser.ParserUtils.ParamClassification.VALUE; import static 
org.hamcrest.Matchers.instanceOf; -import static org.junit.Assert.assertTrue; public final class EsqlTestUtils { @@ -356,6 +352,16 @@ public static List> getValuesList(Iterator> values return valuesList; } + public static List> getValuesList(Iterable> values) { + var valuesList = new ArrayList>(); + values.iterator().forEachRemaining(row -> { + var rowValues = new ArrayList<>(); + row.iterator().forEachRemaining(rowValues::add); + valuesList.add(rowValues); + }); + return valuesList; + } + public static List withDefaultLimitWarning(List warnings) { List result = warnings == null ? new ArrayList<>() : new ArrayList<>(warnings); result.add("No limit defined, adding default limit of [1000]"); @@ -407,16 +413,6 @@ public static String randomEnrichCommand(String name, Enrich.Mode mode, String m return String.join(" | ", all); } - public static void assertWarnings(List warnings, List allowedWarnings, List allowedWarningsRegex) { - if (allowedWarningsRegex.isEmpty()) { - assertMap(warnings.stream().sorted().toList(), matchesList(allowedWarnings.stream().sorted().toList())); - } else { - for (String warning : warnings) { - assertTrue("Unexpected warning: " + warning, allowedWarningsRegex.stream().anyMatch(x -> x.matcher(warning).matches())); - } - } - } - /** * "tables" provided in the context for the LOOKUP command. 
If you * add to this, you must also add to {@code EsqlSpecTestCase#tables}; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index d0edc1f07d021..2ee23382515da 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -202,6 +202,83 @@ d:date_nanos null ; +date nanos greater than +required_capability: to_date_nanos +required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) > TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z") | SORT nanos DESC; + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +2023-10-23T13:53:55.832Z | 2023-10-23T13:53:55.832987654Z | 1698069235832987654 +2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015787878Z | 1698069175015787878 +2023-10-23T13:51:54.732Z | 2023-10-23T13:51:54.732102837Z | 1698069114732102837 +2023-10-23T13:33:34.937Z | 2023-10-23T13:33:34.937193000Z | 1698068014937193000 +; + +date nanos greater than or equal +required_capability: to_date_nanos +required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) >= TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z") | SORT nanos DESC; + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +2023-10-23T13:53:55.832Z | 2023-10-23T13:53:55.832987654Z | 1698069235832987654 +2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015787878Z | 1698069175015787878 +2023-10-23T13:51:54.732Z | 2023-10-23T13:51:54.732102837Z | 1698069114732102837 +2023-10-23T13:33:34.937Z | 2023-10-23T13:33:34.937193000Z | 1698068014937193000 +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +date nanos less than +required_capability: to_date_nanos 
+required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) < TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z") AND millis > "2000-01-01" | SORT nanos DESC; + +millis:date | nanos:date_nanos | num:long +2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 +2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 +; + +date nanos less than equal +required_capability: to_date_nanos +required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) <= TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z") AND millis > "2000-01-01" | SORT nanos DESC; + +millis:date | nanos:date_nanos | num:long +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 +2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 +; + +date nanos equals +required_capability: to_date_nanos +required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) == TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z"); + +millis:date | nanos:date_nanos | num:long +2023-10-23T12:27:28.948Z | 2023-10-23T12:27:28.948000000Z | 1698064048948000000 +; + +date nanos not equals +required_capability: to_date_nanos +required_capability: date_nanos_binary_comparison + +FROM date_nanos | WHERE MV_MIN(nanos) != TO_DATE_NANOS("2023-10-23T12:27:28.948000000Z") AND millis > "2000-01-01" | SORT nanos DESC; + +millis:date | nanos:date_nanos | num:long +2023-10-23T13:55:01.543Z | 2023-10-23T13:55:01.543123456Z | 1698069301543123456 +2023-10-23T13:53:55.832Z | 2023-10-23T13:53:55.832987654Z | 1698069235832987654 +2023-10-23T13:52:55.015Z | 2023-10-23T13:52:55.015787878Z | 1698069175015787878 +2023-10-23T13:51:54.732Z | 2023-10-23T13:51:54.732102837Z | 1698069114732102837 +2023-10-23T13:33:34.937Z | 2023-10-23T13:33:34.937193000Z | 1698068014937193000 +2023-10-23T12:15:03.360Z | 
2023-10-23T12:15:03.360103847Z | 1698063303360103847 +2023-10-23T12:15:03.360Z | 2023-10-23T12:15:03.360103847Z | 1698063303360103847 +; date nanos to long, index version required_capability: to_date_nanos @@ -362,3 +439,23 @@ FROM date_nanos | WHERE millis > "2020-01-01" | STATS v = MV_SORT(VALUES(nanos), v:date_nanos [2023-10-23T13:55:01.543123456Z, 2023-10-23T13:53:55.832987654Z, 2023-10-23T13:52:55.015787878Z, 2023-10-23T13:51:54.732102837Z, 2023-10-23T13:33:34.937193000Z, 2023-10-23T12:27:28.948000000Z, 2023-10-23T12:15:03.360103847Z] ; + +Date trunc on date nanos +required_capability: date_trunc_date_nanos + +FROM date_nanos +| WHERE millis > "2020-01-01" +| EVAL yr = DATE_TRUNC(1 year, nanos), mo = DATE_TRUNC(1 month, nanos), mn = DATE_TRUNC(10 minutes, nanos), ms = DATE_TRUNC(1 millisecond, nanos) +| SORT nanos DESC +| KEEP yr, mo, mn, ms; + +yr:date_nanos | mo:date_nanos | mn:date_nanos | ms:date_nanos +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T13:50:00.000000000Z | 2023-10-23T13:55:01.543000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T13:50:00.000000000Z | 2023-10-23T13:53:55.832000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T13:50:00.000000000Z | 2023-10-23T13:52:55.015000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T13:50:00.000000000Z | 2023-10-23T13:51:54.732000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T13:30:00.000000000Z | 2023-10-23T13:33:34.937000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:20:00.000000000Z | 2023-10-23T12:27:28.948000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z +2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z +; diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/delay.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/delay.csv-spec new file mode 100644 index 0000000000000..002302cc7e683 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/delay.csv-spec @@ -0,0 +1,7 @@ +DELAY +required_capability: delay_debug_fn +row a = 1 | where delay(1ms); + +a:integer +1 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index a9c5a5214f159..14d811535aafd 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -656,3 +656,22 @@ FROM sample_data @timestamp:date | client_ip:ip | event_duration:long | message:keyword ; + +docsBitLength +required_capability: fn_bit_length +// tag::bitLength[] +FROM employees +| KEEP first_name, last_name +| EVAL fn_bit_length = BIT_LENGTH(first_name) +// end::bitLength[] +| SORT first_name +| LIMIT 3 +; + +// tag::bitLength-result[] +first_name:keyword | last_name:keyword | fn_bit_length:integer +Alejandro |McAlpine |72 +Amabile |Gomatam |56 +Anneke |Preusig |48 +// end::bitLength-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec index 0398921efabfd..91075691a6a1c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec @@ -577,6 +577,7 @@ abbrev:keyword | scalerank:integer | location:geo_point byTwoCalculatedSecondOverwrites-Ignore required_capability: join_planning_v1 +required_capability: stats_alias_collision_warnings FROM airports | WHERE abbrev IS NOT NULL @@ -587,6 +588,7 @@ FROM airports | SORT abbrev DESC | LIMIT 3 ; +warning:Line 5:4: Field 'x' shadowed by field at line 
6:3 abbrev:keyword | scalerank:integer | location:geo_point | x:double | min_sl:integer ZRH | 3 | POINT(8.56221279534765 47.4523895064915) | 10 | 2 @@ -596,6 +598,7 @@ abbrev:keyword | scalerank:integer | location:geo_point byTwoCalculatedSecondOverwritesReferencingFirst-Ignore required_capability: join_planning_v1 +required_capability: stats_alias_collision_warnings FROM airports | WHERE abbrev IS NOT NULL @@ -607,6 +610,7 @@ FROM airports | SORT abbrev DESC | LIMIT 3 ; +warning:Line 6:4: Field 'x' shadowed by field at line 7:3 abbrev:keyword | scalerank:integer | location:geo_point | x:double | min_sl:integer ZRH | 3 | POINT(8.56221279534765 47.4523895064915) | 10 | 2 @@ -617,6 +621,7 @@ abbrev:keyword | scalerank:integer | location:geo_point groupShadowsAgg-Ignore required_capability: join_planning_v1 +required_capability: stats_alias_collision_warnings FROM airports | WHERE abbrev IS NOT NULL @@ -628,6 +633,7 @@ FROM airports | SORT abbrev DESC | LIMIT 3 ; +warning:Line 5:3: Field 'lat_10' shadowed by field at line 6:4 abbrev:keyword | scalerank:integer | location:geo_point | lat_10:double | lon_10:double | min_sl:integer ZRH | 3 | POINT(8.56221279534765 47.4523895064915) | 50 | 10 | 2 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec index b0578aa1a4ed0..c35f4c19cc347 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-function.csv-spec @@ -6,11 +6,11 @@ matchWithField required_capability: match_function // tag::match-with-field[] -from books -| where match(author, "Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; // end::match-with-field[] // tag::match-with-field-result[] diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec index 56eded5ce4603..7b55ece964b89 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/match-operator.csv-spec @@ -1,11 +1,19 @@ ############################################### -# Tests for MATCH operator +# Tests for Match function # -singleMatchWithTextField -required_capability: match_operator -from books | where author match "William Faulkner" | keep book_no, author | sort book_no | LIMIT 5; +matchWithField +required_capability: match_operator_colon +// tag::match-with-field[] +FROM books +| WHERE author:"Faulkner" +| KEEP book_no, author +| SORT book_no +| LIMIT 5; +// end::match-with-field[] + +// tag::match-with-field-result[] book_no:keyword | author:text 2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] 2713 | William Faulkner @@ -13,57 +21,175 @@ book_no:keyword | author:text 2883 | William Faulkner 3293 | Danny Faulkner ; +// end::match-with-field-result[] + +matchWithMultipleFunctions +required_capability: match_operator_colon -singleMatchWithKeywordField -required_capability: match_operator -from books | where author.keyword match "William Faulkner" | keep book_no, author | sort book_no; +from books +| where title:"Return" AND author:"Tolkien" +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2714 | Return of the King Being the Third Part of The Lord of the Rings +7350 | Return of the Shadow +; + +matchAfterKeep +required_capability: match_operator_colon + +from books +| keep book_no, author +| where author:"Faulkner" +| sort book_no +| limit 5; book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] 2713 | William Faulkner +2847 | Colleen Faulkner 2883 | William Faulkner -4724 | William Faulkner -4977 | William Faulkner -5119 | 
William Faulkner -5404 | William Faulkner -5578 | William Faulkner -8077 | William Faulkner -9896 | William Faulkner +3293 | Danny Faulkner ; -multipleMatch -required_capability: match_operator -from books -| where (description match "Sauron" OR description match "Dark Lord") AND - (author match "J. R. R. Tolkien" OR author match "John Ronald Reuel Tolkien") -| keep book_no, title, author -| sort book_no -| limit 4 +matchAfterDrop +required_capability: match_operator_colon + +from books +| drop ratings, description, year, publisher, title, author.keyword +| where author:"Faulkner" +| keep book_no, author +| sort book_no +| limit 5; + +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | Danny Faulkner ; -book_no:keyword | title:text | author:text -1463 | Realms of Tolkien: Images of Middle-earth | J. R. R. Tolkien -2675 | The Lord of the Rings - Boxed Set | J.R.R. Tolkien -2714 | Return of the King Being the Third Part of The Lord of the Rings | J. R. R. Tolkien -2936 | Fellowship of the Ring 2ND Edition | John Ronald Reuel Tolkien +matchAfterEval +required_capability: match_operator_colon + +from books +| eval stars = to_long(ratings / 2.0) +| where author:"Faulkner" +| sort book_no +| keep book_no, author, stars +| limit 5; + +book_no:keyword | author:text | stars:long +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 3 +2713 | William Faulkner | 2 +2847 | Colleen Faulkner | 3 +2883 | William Faulkner | 2 +3293 | Danny Faulkner | 2 ; -multipleWhereWithMatch -required_capability: match_operator -from books -| where title match "short stories" -| where author match "Ursula K. 
Le Guin" -| keep book_no, title, author -| sort book_no +matchWithConjunction +required_capability: match_operator_colon + +from books +| where title:"Rings" and ratings > 4.6 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 |A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 |The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) ; -book_no:keyword | title:text | author:text -8480 | The wind's twelve quarters: Short stories | Ursula K. Le Guin +matchWithFunctionPushedToLucene +required_capability: match_operator_colon + +from hosts +| where host:"beta" and cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") +| keep card, host, ip0, ip1; +ignoreOrder:true + +card:keyword |host:keyword |ip0:ip |ip1:ip +eth1 |beta |127.0.0.1 |127.0.0.2 +; + +matchWithNonPushableConjunction +required_capability: match_operator_colon + +from books +| where title:"Rings" and length(title) > 75 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +; + +matchWithMultipleWhereClauses +required_capability: match_operator_colon + +from books +| where title:"rings" +| where title:"lord" +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2675 | The Lord of the Rings - Boxed Set +2714 | Return of the King Being the Third Part of The Lord of the Rings +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 
1) +; + +matchMultivaluedField +required_capability: match_operator_colon + +from employees +| where job_positions:"Tech Lead" and job_positions:"Reporting Analyst" +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10004 | Chirstian | Koblick +10010 | Duangkaew | Piveteau +10011 | Mary | Sluis +10088 | Jungsoon | Syrzycki +10093 | Sailaja | Desikan +10097 | Remzi | Waschkowski +; + +testMultiValuedFieldWithConjunction +required_capability: match_operator_colon + +from employees +| where job_positions:"Data Scientist" and job_positions:"Support Engineer" +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10043 | Yishay | Tzvieli +; + +testMatchAndQueryStringFunctions +required_capability: match_operator_colon +required_capability: qstr_function + +from employees +| where job_positions:"Data Scientist" and qstr("job_positions: (Support Engineer) and gender: F") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10041 | Uri | Lenart +10043 | Yishay | Tzvieli ; combinedMatchWithFunctions -required_capability: match_operator +required_capability: match_operator_colon + from books -| where title match "Tolkien" AND author match "Tolkien" AND year > 2000 +| where title:"Tolkien" AND author:"Tolkien" AND year > 2000 | where mv_count(author) == 1 | keep book_no, title, author, year | sort book_no @@ -74,9 +200,10 @@ book_no:keyword | title:text | author:text | year:integer ; matchWithStats -required_capability: match_operator +required_capability: match_operator_colon + from books -| where author match "faulkner" AND year > 1990 +| where author:"faulkner" AND year > 1990 | where mv_count(author) == 1 | stats count(*) BY author.keyword | sort author.keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index 6dc03d0debcfa..3e92e55928d64 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -6,11 +6,11 @@ qstrWithField required_capability: qstr_function // tag::qstr-with-field[] -from books -| where qstr("author: Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE QSTR("author: Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; // end::qstr-with-field[] // tag::qstr-with-field-result[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 80ba18b85a004..448ee57b34c58 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -509,7 +509,9 @@ m:d | languages:i ; IfDuplicateNamesLastOneWins +required_capability: stats_alias_collision_warnings from employees | stats h = avg(height), h = min(height) by languages | sort languages; +warning:Line 1:24: Field 'h' shadowed by field at line 1:41 h:d | languages:i 1.42 | 1 @@ -533,7 +535,10 @@ m:d | l:i ; IfDuplicateNamesGroupingHasPriority +required_capability: stats_alias_collision_warnings from employees | stats languages = avg(height), languages = min(height) by languages | sort languages; +warning:Line 1:24: Field 'languages' shadowed by field at line 1:76 +warning:Line 1:49: Field 'languages' shadowed by field at line 1:76 languages:i 1 @@ -1582,11 +1587,13 @@ e: i | f:i | g:l | a:i ; nestedAggsOverGroupingTwiceWithAlias#[skip:-8.13.99,reason:supported in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | STATS vals = COUNT() BY x = emp_no, x = languages | SORT x | LIMIT 3 ; +warning:Line 2:27: Field 'x' shadowed by field at line 2:39 vals: l| x:i 15 | 
1 @@ -1623,11 +1630,13 @@ m:i | o:i | l:i | s:i ; byTwoCalculatedSecondSameNameAsFirst#[skip:-8.13.99,reason:supported in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | STATS m = MAX(salary) by l = salary + 1, l = languages + 1 | SORT m | LIMIT 5 ; +warning:Line 2:28: Field 'l' shadowed by field at line 2:44 m:i | l:i 66817 | 6 @@ -1638,12 +1647,14 @@ FROM employees ; byTwoCalculatedSecondShadowingAndReferencingFirst#[skip:-8.13.99,reason:supported in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | EVAL l = languages | STATS m = MAX(salary) by l = l + 1, l = l + 1 | SORT m | LIMIT 5 ; +warning:Line 3:28: Field 'l' shadowed by field at line 3:39 m:i | l:i 66817 | 6 @@ -2013,12 +2024,14 @@ c:l| languages:i ; evalMultipleOverridingKeys#[skip:-8.13.99,reason:supported in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | EVAL k = languages, k1 = k | STATS c = COUNT() BY languages, k, k1, languages | DROP k | SORT languages ; +warning:Line 3:24: Field 'languages' shadowed by field at line 3:42 c:l | k1:i | languages:i 15 | 1 | 1 @@ -2030,12 +2043,14 @@ c:l | k1:i | languages:i ; evalMultipleOverridingKeysWithAggregateExpr#[skip:-8.13.99,reason:supported in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | EVAL k = languages, k1 = k | STATS c = 3*COUNT() BY languages, k, k1, languages | DROP k | SORT languages ; +warning:Line 3:26: Field 'languages' shadowed by field at line 3:44 c:l | k1:i | languages:i 45 | 1 | 1 @@ -2193,19 +2208,24 @@ null ; shadowingInternal +required_capability: stats_alias_collision_warnings FROM employees | STATS x = MAX(emp_no), x = MIN(emp_no) ; +warning:Line 2:9: Field 'x' shadowed by field at line 2:26 x:integer 10001 ; shadowingInternalWithGroup#[skip:-8.14.1,reason:implemented in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | STATS x = MAX(emp_no), x = MIN(emp_no) BY x = gender | SORT x ASC ; +warning:Line 
2:26: Field 'x' shadowed by field at line 2:45 +warning:Line 2:9: Field 'x' shadowed by field at line 2:45 x:keyword F @@ -2214,10 +2234,13 @@ null ; shadowingInternalWithGroup2#[skip:-8.14.1,reason:implemented in 8.14] +required_capability: stats_alias_collision_warnings FROM employees | STATS x = MAX(emp_no), y = count(x) BY x = emp_no, x = gender | SORT x ASC ; +warning:Line 2:42: Field 'x' shadowed by field at line 2:54 +warning:Line 2:9: Field 'x' shadowed by field at line 2:54 y:long | x:keyword 33 | F @@ -2227,10 +2250,13 @@ y:long | x:keyword shadowingTheGroup +required_capability: stats_alias_collision_warnings FROM employees | STATS gender = MAX(emp_no), gender = MIN(emp_no) BY gender | SORT gender ASC ; +warning:Line 2:31: Field 'gender' shadowed by field at line 2:55 +warning:Line 2:9: Field 'gender' shadowed by field at line 2:55 gender:keyword F diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec index 6eebb2f4d19da..2165ee42419c2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec @@ -265,7 +265,7 @@ required_capability: agg_top required_capability: agg_top_string_support required_capability: functions_never_emit_text # we don't need MATCH, but the loader for books.csv is busted in CsvTests -required_capability: match_operator +required_capability: match_operator_colon FROM books | EVAL calc = TRIM(SUBSTRING(title, 2, 5)) @@ -283,7 +283,7 @@ required_capability: agg_top required_capability: agg_top_string_support required_capability: functions_never_emit_text # we don't need MATCH, but the loader for books.csv is busted in CsvTests -required_capability: match_operator +required_capability: match_operator_colon FROM books | EVAL calc = TRIM(SUBSTRING(title, 2, 5)) diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 305b8f3d8011e..de5981df999c7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -38,6 +38,56 @@ emp_no:integer | l:integer 10003 | 5 ; +bitLength +required_capability: fn_bit_length +row a = "hello", b = "" | eval y = bit_length(a) + bit_length(b); + +a:keyword | b:keyword | y:integer +hello | | 40 +; + +bitLengthWithNonAsciiChars +required_capability: fn_bit_length +row a = "¡", b = "❗️" | eval y = bit_length(a) | eval z = bit_length(b); + +a:keyword | b:keyword | y:integer | z:integer +¡ | ❗️ | 16 | 48 +; + +foldBitLength +required_capability: fn_bit_length +row a = 1 | eval b = bit_length("hello"); + +a:integer | b:integer +1 | 40 +; + +bitLengthAndSourceQuoting +required_capability: fn_bit_length +from "employees" | sort emp_no | limit 3 | eval l = bit_length(first_name) | keep emp_no, l; + +emp_no:integer | l:integer +10001 | 48 +10002 | 56 +10003 | 40 +; + +bitLengthInsideOtherFunction +required_capability: fn_bit_length +row a = "abc", b = "de" | eval g = greatest(bit_length(a), bit_length(b), bit_length("fghi")); + +a:keyword | b:keyword | g:integer +abc | de | 32 +; + +bitLengthNull +required_capability: fn_bit_length +row a = "abc" | eval l = bit_length(null); + +a:string | l:integer +abc | null +; + startsWithConstant from employees | sort emp_no | limit 10 | eval f_S = starts_with(first_name, "S") | keep emp_no, first_name, f_S; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 00efeb37a033b..669723abe70dd 100644 --- 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -26,16 +26,19 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; import org.junit.After; import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.equalTo; @TestLogging(value = "org.elasticsearch.xpack.esql.session:DEBUG", reason = "to better understand planning") @@ -204,4 +207,16 @@ protected static QueryPragmas randomPragmas() { protected static boolean canUseQueryPragmas() { return Build.current().isSnapshot(); } + + protected static void assertColumnNames(List actualColumns, List expectedNames) { + assertThat(actualColumns.stream().map(ColumnInfo::name).toList(), equalTo(expectedNames)); + } + + protected static void assertColumnTypes(List actualColumns, List expectedTypes) { + assertThat(actualColumns.stream().map(ColumnInfo::outputType).toList(), equalTo(expectedTypes)); + } + + protected static void assertValues(Iterator> actualValues, Iterable> expectedValues) { + assertThat(getValuesList(actualValues), equalTo(getValuesList(expectedValues))); + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index ab21a2bb8b995..b86c46fd3fa7a 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -7,29 +7,24 @@ package org.elasticsearch.xpack.esql.plugin; -import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; -import org.elasticsearch.xpack.esql.core.type.DataType; import org.junit.Before; import java.util.List; -import static org.elasticsearch.test.ListMatcher.matchesList; -import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.Matchers.equalTo; -//@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") +@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") public class MatchOperatorIT extends AbstractEsqlIntegTestCase { @Before @@ -39,99 +34,90 @@ public void setupIndex() { @Override protected EsqlQueryResponse run(EsqlQueryRequest 
request) { - assumeTrue("match operator available in snapshot builds only", Build.current().isSnapshot()); + assumeTrue("match operator capability not available", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); return super.run(request); } public void testSimpleWhereMatch() { var query = """ FROM test - | WHERE content MATCH "fox" + | WHERE content:"fox" | KEEP id | SORT id """; try (var resp = run(query)) { - assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of("id"))); - assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of("INTEGER"))); - // values - List> values = getValuesList(resp); - assertMap(values, matchesList().item(List.of(1)).item(List.of(6))); + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(6))); } } public void testCombinedWhereMatch() { var query = """ FROM test - | WHERE content MATCH "fox" AND id > 5 + | WHERE content:"fox" AND id > 5 | KEEP id | SORT id """; try (var resp = run(query)) { - assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id")))); - assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER")))); - // values - List> values = getValuesList(resp); - assertMap(values, matchesList().item(List.of(6))); + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(6))); } } public void testMultipleMatch() { var query = """ FROM test - | WHERE content MATCH "fox" OR content MATCH "brown" + | WHERE content:"fox" AND content:"brown" | KEEP id | SORT id """; try (var resp = run(query)) { - assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id")))); - 
assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER")))); - // values - List> values = getValuesList(resp); - assertThat(values.size(), equalTo(5)); - assertMap(values, matchesList().item(List.of(1)).item(List.of(2)).item(List.of(3)).item(List.of(4)).item(List.of(6))); + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(6))); } } public void testMultipleWhereMatch() { var query = """ FROM test - | WHERE content MATCH "fox" OR content MATCH "brown" + | WHERE content:"fox" AND content:"brown" | EVAL summary = CONCAT("document with id: ", to_str(id), "and content: ", content) | SORT summary | LIMIT 4 - | WHERE content MATCH "brown fox" + | WHERE content:"brown fox" | KEEP id """; // TODO: this should not raise an error; var error = expectThrows(ElasticsearchException.class, () -> run(query)); - assertThat(error.getMessage(), containsString("Unsupported expression [content MATCH \"brown fox\"]")); + assertThat(error.getMessage(), containsString("[:] operator cannot be used after LIMIT")); } public void testNotWhereMatch() { var query = """ FROM test - | WHERE NOT content MATCH "brown fox" + | WHERE NOT content:"brown fox" | KEEP id | SORT id """; try (var resp = run(query)) { - assertThat(resp.columns().stream().map(ColumnInfoImpl::name).toList(), equalTo(List.of(("id")))); - assertThat(resp.columns().stream().map(ColumnInfoImpl::type).map(DataType::toString).toList(), equalTo(List.of(("INTEGER")))); - // values - List> values = getValuesList(resp); - assertMap(values, matchesList().item(List.of(5))); + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(5))); } } public void testNonExistingColumn() { var query = """ FROM test - | WHERE something MATCH "fox" + | WHERE something:"fox" """; 
var error = expectThrows(VerificationException.class, () -> run(query)); @@ -142,12 +128,15 @@ public void testWhereMatchEvalColumn() { var query = """ FROM test | EVAL upper_content = to_upper(content) - | WHERE upper_content MATCH "FOX" + | WHERE upper_content:"FOX" | KEEP id """; var error = expectThrows(VerificationException.class, () -> run(query)); - assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [upper_content]")); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [upper_content], which is not a field from an index mapping") + ); } public void testWhereMatchOverWrittenColumn() { @@ -155,18 +144,21 @@ public void testWhereMatchOverWrittenColumn() { FROM test | DROP content | EVAL content = CONCAT("document with ID ", to_str(id)) - | WHERE content MATCH "document" + | WHERE content:"document" """; var error = expectThrows(VerificationException.class, () -> run(query)); - assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [content]")); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [content], which is not a field from an index mapping") + ); } public void testWhereMatchAfterStats() { var query = """ FROM test | STATS count(*) - | WHERE content match "fox" + | WHERE content:"fox" """; var error = expectThrows(VerificationException.class, () -> run(query)); @@ -176,40 +168,49 @@ public void testWhereMatchAfterStats() { public void testWhereMatchWithFunctions() { var query = """ FROM test - | WHERE content MATCH "fox" OR to_upper(content) == "FOX" + | WHERE content:"fox" OR to_upper(content) == "FOX" """; var error = expectThrows(ElasticsearchException.class, () -> run(query)); - assertThat(error.getMessage(), containsString(" Invalid condition using MATCH")); + assertThat( + error.getMessage(), + containsString( + "Invalid condition [content:\"fox\" OR to_upper(content) == \"FOX\"]. 
" + + "[:] operator can't be used as part of an or condition" + ) + ); } public void testWhereMatchWithRow() { var query = """ ROW content = "a brown fox" - | WHERE content MATCH "fox" + | WHERE content:"fox" """; var error = expectThrows(ElasticsearchException.class, () -> run(query)); - assertThat(error.getMessage(), containsString("MATCH requires a mapped index field, found [content]")); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + ); } public void testMatchWithinEval() { var query = """ FROM test - | EVAL matches_query = content MATCH "fox" + | EVAL matches_query = content:"fox" """; var error = expectThrows(VerificationException.class, () -> run(query)); - assertThat(error.getMessage(), containsString("EVAL does not support MATCH expressions")); + assertThat(error.getMessage(), containsString("[:] operator is only supported in WHERE commands")); } public void testMatchWithNonTextField() { var query = """ FROM test - | WHERE id MATCH "fox" + | WHERE id:"fox" """; var error = expectThrows(VerificationException.class, () -> run(query)); - assertThat(error.getMessage(), containsString(" MATCH requires a text or keyword field, but [id] has type [integer]")); + assertThat(error.getMessage(), containsString("first argument of [id:\"fox\"] must be [string], found value [id] type [integer]")); } private void createAndPopulateIndex() { diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index ffab261d3c174..6ec93d203d984 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -105,6 +105,8 @@ WS : [ \r\n\t]+ -> channel(HIDDEN) ; +COLON : ':'; + // // Expression - used by most command // @@ -207,8 +209,8 @@ MINUS : '-'; ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; +EXPRESSION_COLON : {this.isDevVersion()}? 
COLON -> type(COLON); -MATCH : 'match'; NESTED_WHERE : WHERE -> type(WHERE); NAMED_OR_POSITIONAL_PARAM @@ -477,7 +479,7 @@ SHOW_WS mode SETTING_MODE; SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; -COLON : ':'; +SETTING_COLON : COLON -> type(COLON); SETTING : (ASPERAND | DIGIT| DOT | LETTER | UNDERSCORE)+ diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 4d1f426289149..3dd1a2c754038 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -21,46 +21,46 @@ UNKNOWN_CMD=20 LINE_COMMENT=21 MULTILINE_COMMENT=22 WS=23 -PIPE=24 -QUOTED_STRING=25 -INTEGER_LITERAL=26 -DECIMAL_LITERAL=27 -BY=28 -AND=29 -ASC=30 -ASSIGN=31 -CAST_OP=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -IN=38 -IS=39 -LAST=40 -LIKE=41 -LP=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -MATCH=63 +COLON=24 +PIPE=25 +QUOTED_STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +CAST_OP=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +IN=39 +IS=40 +LAST=41 +LIKE=42 +LP=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -101,23 +101,22 @@ INFO=100 SHOW_LINE_COMMENT=101 SHOW_MULTILINE_COMMENT=102 SHOW_WS=103 -COLON=104 -SETTING=105 -SETTING_LINE_COMMENT=106 -SETTTING_MULTILINE_COMMENT=107 -SETTING_WS=108 -LOOKUP_LINE_COMMENT=109 -LOOKUP_MULTILINE_COMMENT=110 -LOOKUP_WS=111 -LOOKUP_FIELD_LINE_COMMENT=112 -LOOKUP_FIELD_MULTILINE_COMMENT=113 -LOOKUP_FIELD_WS=114 -METRICS_LINE_COMMENT=115 -METRICS_MULTILINE_COMMENT=116 -METRICS_WS=117 -CLOSING_METRICS_LINE_COMMENT=118 
-CLOSING_METRICS_MULTILINE_COMMENT=119 -CLOSING_METRICS_WS=120 +SETTING=104 +SETTING_LINE_COMMENT=105 +SETTTING_MULTILINE_COMMENT=106 +SETTING_WS=107 +LOOKUP_LINE_COMMENT=108 +LOOKUP_MULTILINE_COMMENT=109 +LOOKUP_WS=110 +LOOKUP_FIELD_LINE_COMMENT=111 +LOOKUP_FIELD_MULTILINE_COMMENT=112 +LOOKUP_FIELD_WS=113 +METRICS_LINE_COMMENT=114 +METRICS_MULTILINE_COMMENT=115 +METRICS_WS=116 +CLOSING_METRICS_LINE_COMMENT=117 +CLOSING_METRICS_MULTILINE_COMMENT=118 +CLOSING_METRICS_WS=119 'dissect'=1 'drop'=2 'enrich'=3 @@ -134,47 +133,46 @@ CLOSING_METRICS_WS=120 'sort'=14 'stats'=15 'where'=16 -'|'=24 -'by'=28 -'and'=29 -'asc'=30 -'='=31 -'::'=32 -','=33 -'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'in'=38 -'is'=39 -'last'=40 -'like'=41 -'('=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -'match'=63 +':'=24 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +'::'=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'in'=39 +'is'=40 +'last'=41 +'like'=42 +'('=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 ']'=66 'metadata'=75 'as'=84 'on'=88 'with'=89 'info'=100 -':'=104 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index f9f994f4ab329..67f194a1bff64 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -77,7 +77,7 @@ regexBooleanExpression ; matchBooleanExpression - : valueExpression MATCH queryString=string + : fieldExp=qualifiedName COLON queryString=constant ; valueExpression @@ -105,9 +105,7 @@ functionExpression ; functionName - // Additional function identifiers that are already a reserved word in the language - : MATCH - | identifierOrParameter + : 
identifierOrParameter ; dataType diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 4d1f426289149..3dd1a2c754038 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -21,46 +21,46 @@ UNKNOWN_CMD=20 LINE_COMMENT=21 MULTILINE_COMMENT=22 WS=23 -PIPE=24 -QUOTED_STRING=25 -INTEGER_LITERAL=26 -DECIMAL_LITERAL=27 -BY=28 -AND=29 -ASC=30 -ASSIGN=31 -CAST_OP=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -IN=38 -IS=39 -LAST=40 -LIKE=41 -LP=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -MATCH=63 +COLON=24 +PIPE=25 +QUOTED_STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +CAST_OP=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +IN=39 +IS=40 +LAST=41 +LIKE=42 +LP=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 NAMED_OR_POSITIONAL_PARAM=64 OPENING_BRACKET=65 CLOSING_BRACKET=66 @@ -101,23 +101,22 @@ INFO=100 SHOW_LINE_COMMENT=101 SHOW_MULTILINE_COMMENT=102 SHOW_WS=103 -COLON=104 -SETTING=105 -SETTING_LINE_COMMENT=106 -SETTTING_MULTILINE_COMMENT=107 -SETTING_WS=108 -LOOKUP_LINE_COMMENT=109 -LOOKUP_MULTILINE_COMMENT=110 -LOOKUP_WS=111 -LOOKUP_FIELD_LINE_COMMENT=112 -LOOKUP_FIELD_MULTILINE_COMMENT=113 -LOOKUP_FIELD_WS=114 -METRICS_LINE_COMMENT=115 -METRICS_MULTILINE_COMMENT=116 -METRICS_WS=117 -CLOSING_METRICS_LINE_COMMENT=118 -CLOSING_METRICS_MULTILINE_COMMENT=119 -CLOSING_METRICS_WS=120 +SETTING=104 +SETTING_LINE_COMMENT=105 +SETTTING_MULTILINE_COMMENT=106 +SETTING_WS=107 +LOOKUP_LINE_COMMENT=108 +LOOKUP_MULTILINE_COMMENT=109 +LOOKUP_WS=110 +LOOKUP_FIELD_LINE_COMMENT=111 +LOOKUP_FIELD_MULTILINE_COMMENT=112 
+LOOKUP_FIELD_WS=113 +METRICS_LINE_COMMENT=114 +METRICS_MULTILINE_COMMENT=115 +METRICS_WS=116 +CLOSING_METRICS_LINE_COMMENT=117 +CLOSING_METRICS_MULTILINE_COMMENT=118 +CLOSING_METRICS_WS=119 'dissect'=1 'drop'=2 'enrich'=3 @@ -134,47 +133,46 @@ CLOSING_METRICS_WS=120 'sort'=14 'stats'=15 'where'=16 -'|'=24 -'by'=28 -'and'=29 -'asc'=30 -'='=31 -'::'=32 -','=33 -'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'in'=38 -'is'=39 -'last'=40 -'like'=41 -'('=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -'match'=63 +':'=24 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +'::'=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'in'=39 +'is'=40 +'last'=41 +'like'=42 +'('=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 ']'=66 'metadata'=75 'as'=84 'on'=88 'with'=89 'info'=100 -':'=104 diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java new file mode 100644 index 0000000000000..2d34fc613bc74 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java @@ -0,0 +1,130 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.common.Rounding; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. + * This class is generated. Do not edit it. + */ +public final class DateTruncDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator fieldVal; + + private final Rounding.Prepared rounding; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateTruncDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + Rounding.Prepared rounding, DriverContext driverContext) { + this.source = source; + this.fieldVal = fieldVal; + this.rounding = rounding; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock fieldValBlock = (LongBlock) fieldVal.eval(page)) { + LongVector fieldValVector = fieldValBlock.asVector(); + if (fieldValVector == null) { + return eval(page.getPositionCount(), fieldValBlock); + } + return eval(page.getPositionCount(), fieldValVector).asBlock(); + } + } + + public LongBlock eval(int positionCount, LongBlock fieldValBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (fieldValBlock.isNull(p)) { + 
result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendLong(DateTrunc.processDateNanos(fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p)), this.rounding)); + } + return result.build(); + } + } + + public LongVector eval(int positionCount, LongVector fieldValVector) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendLong(p, DateTrunc.processDateNanos(fieldValVector.getLong(p), this.rounding)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateTruncDateNanosEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(fieldVal); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; + + private final Rounding.Prepared rounding; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal, + Rounding.Prepared rounding) { + this.source = source; + this.fieldVal = fieldVal; + this.rounding = rounding; + } + + @Override + public DateTruncDateNanosEvaluator get(DriverContext context) { + return new DateTruncDateNanosEvaluator(source, fieldVal.get(context), rounding, context); + } + + @Override + public String toString() { + return 
"DateTruncDateNanosEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java similarity index 82% rename from x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java rename to x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java index ca6aad07c317e..b9e49dd7e795c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDatetimeEvaluator.java @@ -22,7 +22,7 @@ * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. * This class is generated. Do not edit it. 
*/ -public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluator { +public final class DateTruncDatetimeEvaluator implements EvalOperator.ExpressionEvaluator { private final Source source; private final EvalOperator.ExpressionEvaluator fieldVal; @@ -33,7 +33,7 @@ public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluato private Warnings warnings; - public DateTruncEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + public DateTruncDatetimeEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding, DriverContext driverContext) { this.source = source; this.fieldVal = fieldVal; @@ -66,7 +66,7 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { result.appendNull(); continue position; } - result.appendLong(DateTrunc.process(fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p)), this.rounding)); + result.appendLong(DateTrunc.processDatetime(fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p)), this.rounding)); } return result.build(); } @@ -75,7 +75,7 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { public LongVector eval(int positionCount, LongVector fieldValVector) { try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, DateTrunc.process(fieldValVector.getLong(p), this.rounding)); + result.appendLong(p, DateTrunc.processDatetime(fieldValVector.getLong(p), this.rounding)); } return result.build(); } @@ -83,7 +83,7 @@ public LongVector eval(int positionCount, LongVector fieldValVector) { @Override public String toString() { - return "DateTruncEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; + return "DateTruncDatetimeEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; } @Override @@ -118,13 +118,13 @@ public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory fieldVal, } @Override - public DateTruncEvaluator get(DriverContext context) { - return new DateTruncEvaluator(source, fieldVal.get(context), rounding, context); + public DateTruncDatetimeEvaluator get(DriverContext context) { + return new DateTruncDatetimeEvaluator(source, fieldVal.get(context), rounding, context); } @Override public String toString() { - return "DateTruncEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; + return "DateTruncDatetimeEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java new file mode 100644 index 0000000000000..6564a2f3ef167 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthEvaluator.java @@ -0,0 +1,137 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link BitLength}. + * This class is generated. Do not edit it. + */ +public final class BitLengthEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + private Warnings warnings; + + public BitLengthEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.source = source; + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock valBlock = (BytesRefBlock) val.eval(page)) { + BytesRefVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector); + } + } + + public IntBlock eval(int positionCount, BytesRefBlock valBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } 
+ if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(BitLength.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch))); + } catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector valVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef valScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(BitLength.process(valVector.getBytesRef(p, valScratch))); + } catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "BitLengthEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public BitLengthEvaluator get(DriverContext context) { + return new BitLengthEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "BitLengthEvaluator[" + "val=" + val + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java new file mode 100644 index 0000000000000..0db714eceb285 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/util/DelayEvaluator.java @@ -0,0 +1,91 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.util; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Delay}. + * This class is generated. Do not edit it. 
+ */ +public final class DelayEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final long ms; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DelayEvaluator(Source source, long ms, DriverContext driverContext) { + this.source = source; + this.ms = ms; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + return eval(page.getPositionCount()).asBlock(); + } + + public BooleanVector eval(int positionCount) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(p, Delay.process(this.ms)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "DelayEvaluator[" + "ms=" + ms + "]"; + } + + @Override + public void close() { + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final long ms; + + public Factory(Source source, long ms) { + this.source = source; + this.ms = ms; + } + + @Override + public DelayEvaluator get(DriverContext context) { + return new DelayEvaluator(source, ms, context); + } + + @Override + public String toString() { + return "DelayEvaluator[" + "ms=" + ms + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index a17733af6bd64..b0111485adbe7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -27,6 +27,12 @@ */ public class EsqlCapabilities { public enum Cap { + + /** + * Support for function {@code BIT_LENGTH}. Done in #115792 + */ + FN_BIT_LENGTH, + /** * Support for function {@code REVERSE}. */ @@ -289,9 +295,9 @@ public enum Cap { MV_PSERIES_WEIGHTED_SUM, /** - * Support for match operator + * Support for match operator as a colon. Previous support for match operator as MATCH has been removed */ - MATCH_OPERATOR(Build.current().isSnapshot()), + MATCH_OPERATOR_COLON(Build.current().isSnapshot()), /** * Removing support for the {@code META} keyword. @@ -313,11 +319,21 @@ public enum Cap { */ TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support for date nanos type in binary comparisons + */ + DATE_NANOS_BINARY_COMPARISON(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * Support Least and Greatest functions on Date Nanos type */ LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** + * Support for date_trunc function on date nanos type + */ + DATE_TRUNC_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + /** * support aggregations on date nanos */ @@ -428,6 +444,12 @@ public enum Cap { */ FIX_FILTER_PUSHDOWN_PAST_STATS, + /** + * Send warnings on STATS alias collision + * https://github.com/elastic/elasticsearch/issues/114970 + */ + STATS_ALIAS_COLLISION_WARNINGS, + /** * This enables 60_usage.yml "Basic ESQL usage....snapshot" version test. See also the next capability. 
*/ @@ -448,6 +470,7 @@ public enum Cap { */ ADD_LIMIT_INSIDE_MV_EXPAND, + DELAY_DEBUG_FN(Build.current().isSnapshot()), /** * WIP on Join planning * - Introduce BinaryPlan and co diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java index 2161efca1d2b4..f7e6793fc4fb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResolveFieldsAction.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; @@ -14,6 +15,7 @@ import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,7 +29,7 @@ public class EsqlResolveFieldsAction extends HandledTransportAction { public static final String NAME = "indices:data/read/esql/resolve_fields"; public static final ActionType TYPE = new ActionType<>(NAME); - public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( + public static final RemoteClusterActionType RESOLVE_REMOTE_TYPE = new RemoteClusterActionType<>( NAME, FieldCapabilitiesResponse::new ); @@ -47,6 +49,19 @@ public EsqlResolveFieldsAction( @Override protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener listener) { - fieldCapsAction.executeRequest(task, request, 
REMOTE_TYPE, listener); + fieldCapsAction.executeRequest(task, request, this::executeRemoteRequest, listener); + } + + void executeRemoteRequest( + RemoteClusterClient remoteClient, + FieldCapabilitiesRequest remoteRequest, + ActionListener remoteListener + ) { + remoteClient.getConnection(remoteRequest, remoteListener.delegateFailure((l, conn) -> { + var remoteAction = conn.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES) + ? RESOLVE_REMOTE_TYPE + : TransportFieldCapabilitiesAction.REMOTE_TYPE; + remoteClient.execute(conn, remoteAction, remoteRequest, l); + })); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 994ea3ecdbb0d..632f52d163349 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; @@ -22,13 +21,11 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; @@ -66,12 +63,13 @@ import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.common.Failure.fail; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; -import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushToSource; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; /** * This class is part of the planner. Responsible for failing impossible queries with a human-readable error message. 
In particular, this @@ -195,7 +193,6 @@ else if (p instanceof Lookup lookup) { checkBinaryComparison(p, failures); checkForSortableDataTypes(p, failures); - checkFilterMatchConditions(p, failures); checkFullTextQueryFunctions(p, failures); }); checkRemoteEnrich(plan, failures); @@ -319,6 +316,10 @@ private static void checkInvalidNamedExpressionUsage( Expression filter = fe.filter(); failures.add(fail(filter, "WHERE clause allowed only for aggregate functions, none found in [{}]", fe.sourceText())); } + Expression f = fe.filter(); // check the filter has to be a boolean term, similar as checkFilterConditionType + if (f.dataType() != NULL && f.dataType() != BOOLEAN) { + failures.add(fail(f, "Condition expression needs to be boolean, found [{}]", f.dataType())); + } // but that the filter doesn't use grouping or aggregate functions fe.filter().forEachDown(c -> { if (c instanceof AggregateFunction af) { @@ -443,11 +444,6 @@ private static void checkEvalFields(LogicalPlan p, Set failures) { failures.add(fail(af, "aggregate function [{}] not allowed outside STATS command", af.sourceText())); } }); - // check no MATCH expressions are used - field.forEachDown( - MatchQueryPredicate.class, - mqp -> { failures.add(fail(mqp, "EVAL does not support MATCH expressions")); } - ); }); } } @@ -511,6 +507,7 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { } allowed.add(DataType.IP); allowed.add(DataType.DATETIME); + allowed.add(DataType.DATE_NANOS); allowed.add(DataType.VERSION); allowed.add(DataType.GEO_POINT); allowed.add(DataType.GEO_SHAPE); @@ -633,86 +630,104 @@ private static void checkRemoteEnrich(LogicalPlan plan, Set failures) { } /** - * Currently any filter condition using MATCH needs to be pushed down to the Lucene query. - * Conditions that use a combination of MATCH and ES|QL functions (e.g. `title MATCH "anna" OR DATE_EXTRACT("year", date) > 2010) - * cannot be pushed down to Lucene. 
- * Another condition is for MATCH to use index fields that have been mapped as text or keyword. - * We are using canPushToSource at the Verifier level because we want to detect any condition that cannot be pushed down - * early in the execution, rather than fail at the compute engine level. - * In the future we will be able to handle MATCH at the compute and we will no longer need these checks. + * Checks whether a condition contains a disjunction with the specified typeToken. Adds to failure if it does. + * + * @param condition condition to check for disjunctions + * @param typeNameProvider provider for the type name to add in the failure message + * @param failures failures collection to add to */ - private static void checkFilterMatchConditions(LogicalPlan plan, Set failures) { - if (plan instanceof Filter f) { - Expression condition = f.condition(); - - Holder hasMatch = new Holder<>(false); - condition.forEachDown(MatchQueryPredicate.class, mqp -> { - hasMatch.set(true); - var field = mqp.field(); - if (field instanceof FieldAttribute == false) { - failures.add(fail(mqp, "MATCH requires a mapped index field, found [" + field.sourceText() + "]")); - } - - if (DataType.isString(field.dataType()) == false) { - var message = LoggerMessageFormat.format( - null, - "MATCH requires a text or keyword field, but [{}] has type [{}]", - field.sourceText(), - field.dataType().esType() - ); - failures.add(fail(mqp, message)); - } - }); + private static void checkNotPresentInDisjunctions( + Expression condition, + java.util.function.Function typeNameProvider, + Set failures + ) { + condition.forEachUp(Or.class, or -> { + checkNotPresentInDisjunctions(or.left(), or, typeNameProvider, failures); + checkNotPresentInDisjunctions(or.right(), or, typeNameProvider, failures); + }); + } - if (canPushToSource(condition, x -> false)) { - return; - } - if (hasMatch.get()) { - failures.add(fail(condition, "Invalid condition using MATCH")); - } - } + /** + * Checks whether a condition 
contains a disjunction with the specified typeToken. Adds to failure if it does. + * + * @param parentExpression parent expression to add to the failure message + * @param or disjunction that is being checked + * @param failures failures collection to add to + */ + private static void checkNotPresentInDisjunctions( + Expression parentExpression, + Or or, + java.util.function.Function elementName, + Set failures + ) { + parentExpression.forEachDown(FullTextFunction.class, ftp -> { + failures.add( + fail(or, "Invalid condition [{}]. {} can't be used as part of an or condition", or.sourceText(), elementName.apply(ftp)) + ); + }); } + /** + * Checks full text query functions for invalid usage. + * + * @param plan root plan to check + * @param failures failures found + */ private static void checkFullTextQueryFunctions(LogicalPlan plan, Set failures) { if (plan instanceof Filter f) { Expression condition = f.condition(); - checkCommandsBeforeQueryStringFunction(plan, condition, failures); - checkCommandsBeforeMatchFunction(plan, condition, failures); - checkFullTextFunctionsConditions(condition, failures); + checkCommandsBeforeExpression( + plan, + condition, + QueryString.class, + lp -> (lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation), + qsf -> "[" + qsf.functionName() + "] " + qsf.functionType(), + failures + ); + checkCommandsBeforeExpression( + plan, + condition, + Match.class, + lp -> (lp instanceof Limit == false), + m -> "[" + m.functionName() + "] " + m.functionType(), + failures + ); + checkNotPresentInDisjunctions(condition, ftf -> "[" + ftf.functionName() + "] " + ftf.functionType(), failures); checkFullTextFunctionsParents(condition, failures); } else { plan.forEachExpression(FullTextFunction.class, ftf -> { - failures.add(fail(ftf, "[{}] function is only supported in WHERE commands", ftf.functionName())); + failures.add(fail(ftf, "[{}] {} is only supported in WHERE commands", ftf.functionName(), ftf.functionType())); }); } } - 
private static void checkCommandsBeforeQueryStringFunction(LogicalPlan plan, Expression condition, Set failures) { - condition.forEachDown(QueryString.class, qsf -> { - plan.forEachDown(LogicalPlan.class, lp -> { - if ((lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation) == false) { - failures.add( - fail( - plan, - "[{}] function cannot be used after {}", - qsf.functionName(), - lp.sourceText().split(" ")[0].toUpperCase(Locale.ROOT) - ) - ); - } - }); - }); - } - - private static void checkCommandsBeforeMatchFunction(LogicalPlan plan, Expression condition, Set failures) { - condition.forEachDown(Match.class, qsf -> { + /** + * Checks all commands that exist before a specific type satisfy conditions. + * + * @param plan plan that contains the condition + * @param condition condition to check + * @param typeToken type to check for. When a type is found in the condition, all plans before the root plan are checked + * @param commandCheck check to perform on each command that precedes the plan that contains the typeToken + * @param typeErrorMsgProvider provider for the type name in the error message + * @param failures failures to add errors to + * @param class of the type to look for + */ + private static void checkCommandsBeforeExpression( + LogicalPlan plan, + Expression condition, + Class typeToken, + Predicate commandCheck, + java.util.function.Function typeErrorMsgProvider, + Set failures + ) { + condition.forEachDown(typeToken, exp -> { plan.forEachDown(LogicalPlan.class, lp -> { - if (lp instanceof Limit) { + if (commandCheck.test(lp) == false) { failures.add( fail( plan, - "[{}] function cannot be used after {}", - qsf.functionName(), + "{} cannot be used after {}", + typeErrorMsgProvider.apply(exp), lp.sourceText().split(" ")[0].toUpperCase(Locale.ROOT) ) ); @@ -721,26 +736,11 @@ private static void checkCommandsBeforeMatchFunction(LogicalPlan plan, Expressio }); } - private static void checkFullTextFunctionsConditions(Expression 
condition, Set failures) { - condition.forEachUp(Or.class, or -> { - checkFullTextFunctionInDisjunction(failures, or, or.left()); - checkFullTextFunctionInDisjunction(failures, or, or.right()); - }); - } - - private static void checkFullTextFunctionInDisjunction(Set failures, Or or, Expression left) { - left.forEachDown(FullTextFunction.class, ftf -> { - failures.add( - fail( - or, - "Invalid condition [{}]. Function {} can't be used as part of an or condition", - or.sourceText(), - ftf.functionName() - ) - ); - }); - } - + /** + * Checks parents of a full text function to ensure they are allowed + * @param condition condition that contains the full text function + * @param failures failures to add errors to + */ private static void checkFullTextFunctionsParents(Expression condition, Set failures) { forEachFullTextFunctionParent(condition, (ftf, parent) -> { if ((parent instanceof FullTextFunction == false) @@ -749,9 +749,10 @@ private static void checkFullTextFunctionsParents(Expression condition, Set QueryList.ipTermQueryList(field, searchExecutionContext, (BytesRefBlock) block); + case DATETIME -> QueryList.dateTermQueryList(field, searchExecutionContext, (LongBlock) block); + default -> QueryList.rawTermQueryList(field, searchExecutionContext, block); + }; + } + /** * Perform the actual lookup. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 9638571fab993..f24a16bb63697 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -78,8 +79,8 @@ protected TransportRequest transportRequest(EnrichLookupService.Request request, protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); return switch (request.matchType) { - case "match", "range" -> QueryList.termQueryList(fieldType, context, inputBlock, inputDataType); - case "geo_match" -> QueryList.geoShapeQuery(fieldType, context, inputBlock, inputDataType); + case "match", "range" -> termQueryList(fieldType, context, inputBlock, inputDataType); + case "geo_match" -> QueryList.geoShapeQueryList(fieldType, context, inputBlock); default -> throw new EsqlIllegalArgumentException("illegal match type " + request.matchType); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index b0ee77327690a..ef204e88c234f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; @@ -74,7 +75,7 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque @Override protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); - return QueryList.termQueryList(fieldType, context, inputBlock, inputDataType); + return termQueryList(fieldType, context, inputBlock, inputDataType); } public static class Request extends AbstractLookupService.Request { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 66151275fc2e8..7a6ff79d79a65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -117,6 +117,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; @@ -135,6 +136,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; import org.elasticsearch.xpack.esql.session.Configuration; import java.lang.reflect.Constructor; @@ -305,6 +307,7 @@ private FunctionDefinition[][] functions() { def(Tau.class, Tau::new, "tau") }, // string new FunctionDefinition[] { + def(BitLength.class, BitLength::new, "bit_length"), def(Concat.class, Concat::new, "concat"), def(EndsWith.class, EndsWith::new, "ends_with"), def(LTrim.class, LTrim::new, "ltrim"), @@ -397,6 +400,9 @@ private FunctionDefinition[][] functions() { private static FunctionDefinition[][] snapshotFunctions() { return new FunctionDefinition[][] { new FunctionDefinition[] { + // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. + // This is an experimental function and can be removed without notice. 
+ def(Delay.class, Delay::new, "delay"), def(Categorize.class, Categorize::new, "categorize"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 2f97de4c64469..1a3667de992cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -105,4 +105,13 @@ protected TypeResolutions.ParamOrdinal queryParamOrdinal() { public Nullability nullable() { return Nullability.FALSE; } + + /** + * Used to differentiate error messages between functions and operators + * + * @return function type for error messages + */ + public String functionType() { + return "function"; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index b4e0f3c743216..522a5574c0053 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.util.List; +import java.util.Locale; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -37,10 +38,12 @@ */ public class Match extends FullTextFunction implements Validatable { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", 
Match::new); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); private final Expression field; + private transient Boolean isOperator; + @FunctionInfo( returnType = "boolean", preview = true, @@ -60,14 +63,17 @@ public Match( this.field = field; } - private Match(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + private static Match readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression field = in.readNamedWriteable(Expression.class); + Expression query = in.readNamedWriteable(Expression.class); + return new Match(source, field, query); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - out.writeNamedWriteable(field); + out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); } @@ -87,8 +93,9 @@ public void validate(Failures failures) { failures.add( Failure.fail( field, - "[{}] cannot operate on [{}], which is not a field from an index mapping", + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", functionName(), + functionType(), field.sourceText() ) ); @@ -97,7 +104,6 @@ public void validate(Failures failures) { @Override public Expression replaceChildren(List newChildren) { - // Query is the first child, field is the second child return new Match(source(), newChildren.get(0), newChildren.get(1)); } @@ -113,4 +119,21 @@ protected TypeResolutions.ParamOrdinal queryParamOrdinal() { public Expression field() { return field; } + + @Override + public String functionType() { + return isOperator() ? "operator" : super.functionType(); + } + + @Override + public String functionName() { + return isOperator() ? 
":" : super.functionName(); + } + + private boolean isOperator() { + if (isOperator == null) { + isOperator = source().text().toUpperCase(Locale.ROOT).matches("^" + super.functionName() + "\\s*\\(.*\\)") == false; + } + return isOperator; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 3357b2abf0e0f..9e40b85fd6590 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -252,7 +252,7 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { assert DataType.isTemporalAmount(buckets.dataType()) : "Unexpected span data type [" + buckets.dataType() + "]"; preparedRounding = DateTrunc.createRounding(buckets.fold(), DEFAULT_TZ); } - return DateTrunc.evaluator(source(), toEvaluator.apply(field), preparedRounding); + return DateTrunc.evaluator(field.dataType(), source(), toEvaluator.apply(field), preparedRounding); } if (field.dataType().isNumeric()) { double roundTo; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index e4e1fbb6e5aac..65985f234ac92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import 
org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; @@ -74,6 +75,7 @@ public static List getNamedWriteables() { List entries = new ArrayList<>(); entries.add(And.ENTRY); entries.add(Atan2.ENTRY); + entries.add(BitLength.ENTRY); entries.add(Bucket.ENTRY); entries.add(Case.ENTRY); entries.add(Categorize.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 53b51f16d4183..e9ca69055658d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -123,6 +124,7 @@ public static List getNamedWriteables() { entries.add(ToVersion.ENTRY); entries.add(Trim.ENTRY); entries.add(WildcardLike.ENTRY); + entries.add(Delay.ENTRY); entries.addAll(AbstractMultivalueFunction.getNamedWriteables()); return entries; } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 35a705f418906..6e38d72500840 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -31,12 +32,14 @@ import java.time.ZoneId; import java.time.ZoneOffset; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; public class DateTrunc extends EsqlScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -45,6 +48,15 @@ public class DateTrunc extends EsqlScalarFunction { DateTrunc::new ); + @FunctionalInterface + public interface DateTruncFactoryProvider { + ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory lhs, Rounding.Prepared rounding); + } + + private 
static final Map evaluatorMap = Map.ofEntries( + Map.entry(DATETIME, DateTruncDatetimeEvaluator.Factory::new), + Map.entry(DATE_NANOS, DateTruncDateNanosEvaluator.Factory::new) + ); private final Expression interval; private final Expression timestampField; protected static final ZoneId DEFAULT_TZ = ZoneOffset.UTC; @@ -108,20 +120,28 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } + String operationName = sourceText(); return isType(interval, DataType::isTemporalAmount, sourceText(), FIRST, "dateperiod", "timeduration").and( - isDate(timestampField, sourceText(), SECOND) + isType(timestampField, evaluatorMap::containsKey, operationName, SECOND, "date_nanos or datetime") ); } public DataType dataType() { - return DataType.DATETIME; + // Default to DATETIME in the case of nulls. This mimics the behavior before DATE_NANOS support + return timestampField.dataType() == DataType.NULL ? DATETIME : timestampField.dataType(); } - @Evaluator - static long process(long fieldVal, @Fixed Rounding.Prepared rounding) { + @Evaluator(extraName = "Datetime") + static long processDatetime(long fieldVal, @Fixed Rounding.Prepared rounding) { return rounding.round(fieldVal); } + @Evaluator(extraName = "DateNanos") + static long processDateNanos(long fieldVal, @Fixed Rounding.Prepared rounding) { + // Currently, ES|QL doesn't support rounding to sub-millisecond values, so it's safe to cast before rounding. + return DateUtils.toNanoSeconds(rounding.round(DateUtils.toMilliSeconds(fieldVal))); + } + @Override public Expression replaceChildren(List newChildren) { return new DateTrunc(source(), newChildren.get(0), newChildren.get(1)); @@ -214,14 +234,15 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { "Function [" + sourceText() + "] has invalid interval [" + interval.sourceText() + "]. 
" + e.getMessage() ); } - return evaluator(source(), fieldEvaluator, DateTrunc.createRounding(foldedInterval, DEFAULT_TZ)); + return evaluator(dataType(), source(), fieldEvaluator, DateTrunc.createRounding(foldedInterval, DEFAULT_TZ)); } public static ExpressionEvaluator.Factory evaluator( + DataType forType, Source source, ExpressionEvaluator.Factory fieldEvaluator, Rounding.Prepared rounding ) { - return new DateTruncEvaluator.Factory(source, fieldEvaluator, rounding); + return evaluatorMap.get(forType).apply(source, fieldEvaluator, rounding); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLength.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLength.java new file mode 100644 index 0000000000000..5deb6fa7feba6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLength.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; + +public class BitLength extends UnaryScalarFunction { + + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "BitLength", + BitLength::new + ); + + @FunctionInfo( + returnType = "integer", + description = "Returns the bit length of a string.", + examples = @Example(file = "docs", tag = "bitLength") + ) + public BitLength( + Source source, + @Param( + name = "string", + type = { "keyword", "text" }, + description = "String expression. If `null`, the function returns `null`." 
+ ) Expression field + ) { + super(source, field); + } + + private BitLength(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + return DataType.INTEGER; + } + + @Override + protected TypeResolution resolveType() { + return childrenResolved() == false ? new TypeResolution("Unresolved children") : isString(field(), sourceText(), DEFAULT); + } + + @Evaluator(warnExceptions = { ArithmeticException.class }) + static int process(BytesRef val) { + return Math.multiplyExact(val.length, Byte.SIZE); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new BitLength(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, BitLength::new, field()); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + return new BitLengthEvaluator.Factory(source(), toEvaluator.apply(field())); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java new file mode 100644 index 0000000000000..1d03f09c86409 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/Delay.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.util; + +import org.elasticsearch.Build; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; + +import java.io.IOException; +import java.time.Duration; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +/** + * Slowdown function - for debug purposes only. + * Syntax: WAIT(ms) - will sleep for ms milliseconds. 
+ */ +public class Delay extends UnaryScalarFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Delay", Delay::new); + + public Delay(Source source, @Param(name = "ms", type = { "time_duration" }, description = "For how long") Expression ms) { + super(source, ms); + } + + private Delay(StreamInput in) throws IOException { + super(in); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Delay(source(), newChildren.getFirst()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + return DataType.BOOLEAN; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + return isType(field(), t -> t == DataType.TIME_DURATION, sourceText(), FIRST, "time_duration"); + } + + @Override + public Nullability nullable() { + return Nullability.FALSE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Delay::new, field()); + } + + @Override + public boolean foldable() { + return false; + } + + @Override + public Object fold() { + return null; + } + + private long msValue() { + if (field().foldable() == false) { + throw new IllegalArgumentException("function [" + sourceText() + "] has invalid argument [" + field().sourceText() + "]"); + } + var ms = field().fold(); + if (ms instanceof Duration duration) { + return duration.toMillis(); + } + return ((Number) ms).longValue(); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator) { + return new DelayEvaluator.Factory(source(), msValue()); + } + + @Evaluator + static boolean process(@Fixed long ms) { + // Only activate in snapshot builds + if (Build.current().isSnapshot()) { + try { + Thread.sleep(ms); + } catch (InterruptedException e) { + return true; + } + } else { + throw new 
IllegalArgumentException("Delay function is only available in snapshot builds"); + } + return true; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/RemoveStatsOverride.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/RemoveStatsOverride.java index 0cabe4376999f..97659f2a77813 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/RemoveStatsOverride.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/RemoveStatsOverride.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.logical; -import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -16,6 +16,8 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.common.logging.HeaderWarning.addWarning; + /** * Removes {@link Aggregate} overrides in grouping, aggregates and across them inside. 
* The overrides appear when the same alias is used multiple times in aggregations @@ -42,13 +44,24 @@ protected LogicalPlan rule(Aggregate aggregate) { private static List removeDuplicateNames(List list) { var newList = new ArrayList<>(list); - var nameSet = Sets.newHashSetWithExpectedSize(list.size()); + var expressionsByName = Maps.newMapWithExpectedSize(list.size()); // remove duplicates for (int i = list.size() - 1; i >= 0; i--) { var element = list.get(i); var name = Expressions.name(element); - if (nameSet.add(name) == false) { + var previousExpression = expressionsByName.putIfAbsent(name, element); + if (previousExpression != null) { + var source = element.source().source(); + var previousSource = previousExpression.source().source(); + addWarning( + "Line {}:{}: Field '{}' shadowed by field at line {}:{}", + source.getLineNumber(), + source.getColumnNumber(), + name, + previousSource.getLineNumber(), + previousSource.getColumnNumber() + ); newList.remove(i); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 2209dffe5af06..626ef5e83bd65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; @@ -247,8 +246,6 @@ public static boolean canPushToSource(Expression exp, Predicate return isAttributePushable(cidrMatch.ipField(), cidrMatch, hasIdenticalDelegate) && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial); - } else if (exp instanceof MatchQueryPredicate mqp) { - return mqp.field() instanceof FieldAttribute && DataType.isString(mqp.field().dataType()); } else if (exp instanceof StringQueryPredicate) { return true; } else if (exp instanceof QueryString) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 855faf9df5ed2..2ae496b55ac00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -184,8 +184,7 @@ && canPushDownOrders(topNExec.order(), hasIdenticalDelegate)) { break; } } - // TODO: We can push down partial sorts where `pushableSorts.size() < orders.size()`, but that should involve benchmarks - if (pushableSorts.size() > 0 && pushableSorts.size() == orders.size()) { + if (pushableSorts.isEmpty() == false) { return new PushableCompoundExec(evalExec, queryExec, pushableSorts); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 2566da379af73..8f9c5956dddd5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -23,6 +23,7 @@ null null null null +':' '|' null null @@ -62,7 +63,6 @@ null '*' '/' '%' -'match' null null ']' @@ -103,7 +103,6 @@ null null null null -':' null null null @@ -146,6 +145,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -185,7 +185,6 @@ MINUS ASTERISK SLASH PERCENT -MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -226,7 +225,6 @@ INFO SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS -COLON SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT @@ -268,6 +266,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +COLON PIPE DIGIT LETTER @@ -317,7 +316,7 @@ MINUS ASTERISK SLASH PERCENT -MATCH +EXPRESSION_COLON NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -406,7 +405,7 @@ SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS SETTING_CLOSING_BRACKET -COLON +SETTING_COLON SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT @@ -466,4 +465,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 120, 1479, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 
7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 
188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 578, 8, 19, 11, 19, 12, 19, 579, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 588, 8, 20, 10, 20, 12, 20, 591, 9, 20, 1, 20, 3, 20, 594, 8, 20, 1, 20, 3, 20, 597, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 606, 8, 21, 10, 21, 12, 21, 609, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 617, 8, 22, 11, 22, 12, 22, 618, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 3, 28, 638, 8, 28, 1, 28, 4, 28, 641, 8, 28, 11, 28, 12, 28, 642, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 652, 8, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 659, 8, 33, 1, 34, 1, 34, 1, 34, 5, 34, 664, 8, 34, 10, 34, 12, 34, 667, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 
34, 5, 34, 675, 8, 34, 10, 34, 12, 34, 678, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 685, 8, 34, 1, 34, 3, 34, 688, 8, 34, 3, 34, 690, 8, 34, 1, 35, 4, 35, 693, 8, 35, 11, 35, 12, 35, 694, 1, 36, 4, 36, 698, 8, 36, 11, 36, 12, 36, 699, 1, 36, 1, 36, 5, 36, 704, 8, 36, 10, 36, 12, 36, 707, 9, 36, 1, 36, 1, 36, 4, 36, 711, 8, 36, 11, 36, 12, 36, 712, 1, 36, 4, 36, 716, 8, 36, 11, 36, 12, 36, 717, 1, 36, 1, 36, 5, 36, 722, 8, 36, 10, 36, 12, 36, 725, 9, 36, 3, 36, 727, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 4, 36, 733, 8, 36, 11, 36, 12, 36, 734, 1, 36, 1, 36, 3, 36, 739, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 871, 8, 74, 1, 74, 5, 74, 874, 8, 74, 10, 74, 12, 74, 877, 9, 74, 1, 74, 1, 74, 4, 74, 881, 8, 74, 11, 74, 12, 74, 882, 3, 74, 885, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 899, 8, 77, 10, 77, 12, 77, 902, 9, 77, 1, 77, 1, 77, 3, 77, 906, 8, 77, 1, 77, 4, 77, 909, 8, 77, 11, 77, 12, 77, 910, 3, 77, 913, 8, 77, 1, 78, 1, 78, 4, 78, 917, 8, 78, 11, 78, 12, 78, 918, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 
81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 996, 8, 95, 1, 96, 4, 96, 999, 8, 96, 11, 96, 12, 96, 1000, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 3, 107, 1050, 8, 107, 1, 108, 1, 108, 3, 108, 1054, 8, 108, 1, 108, 5, 108, 1057, 8, 108, 10, 108, 12, 108, 1060, 9, 108, 1, 108, 1, 108, 3, 108, 1064, 8, 108, 1, 108, 4, 108, 1067, 8, 108, 11, 108, 12, 108, 1068, 3, 108, 1071, 8, 108, 1, 109, 1, 109, 4, 109, 1075, 8, 109, 11, 109, 12, 109, 1076, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 129, 4, 129, 1162, 8, 129, 11, 129, 12, 129, 1163, 1, 129, 1, 129, 3, 129, 1168, 8, 129, 1, 129, 4, 129, 1171, 8, 129, 11, 129, 12, 129, 1172, 1, 130, 1, 130, 
1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 4, 162, 1316, 8, 162, 11, 162, 12, 162, 1317, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 
1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 2, 607, 676, 0, 198, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 25, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 80, 235, 81, 237, 82, 239, 83, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 84, 255, 0, 257, 85, 259, 86, 261, 87, 263, 0, 265, 0, 267, 88, 269, 89, 271, 0, 273, 90, 275, 0, 277, 91, 279, 92, 281, 93, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 94, 303, 95, 305, 96, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 97, 321, 98, 323, 99, 325, 0, 327, 100, 329, 101, 331, 102, 333, 103, 335, 0, 337, 104, 339, 105, 341, 106, 343, 107, 345, 108, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 109, 363, 110, 365, 111, 367, 0, 369, 0, 371, 0, 373, 0, 375, 112, 377, 113, 379, 
114, 381, 0, 383, 0, 385, 0, 387, 115, 389, 116, 391, 117, 393, 0, 395, 0, 397, 118, 399, 119, 401, 120, 403, 0, 405, 0, 407, 0, 409, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1507, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 
1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 6, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 
8, 323, 1, 0, 0, 0, 9, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 10, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 12, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 13, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 15, 411, 1, 0, 0, 0, 17, 421, 1, 0, 0, 0, 19, 428, 1, 0, 0, 0, 21, 437, 1, 0, 0, 0, 23, 444, 1, 0, 0, 0, 25, 454, 1, 0, 0, 0, 27, 461, 1, 0, 0, 0, 29, 468, 1, 0, 0, 0, 31, 475, 1, 0, 0, 0, 33, 483, 1, 0, 0, 0, 35, 495, 1, 0, 0, 0, 37, 504, 1, 0, 0, 0, 39, 510, 1, 0, 0, 0, 41, 517, 1, 0, 0, 0, 43, 524, 1, 0, 0, 0, 45, 532, 1, 0, 0, 0, 47, 540, 1, 0, 0, 0, 49, 555, 1, 0, 0, 0, 51, 565, 1, 0, 0, 0, 53, 577, 1, 0, 0, 0, 55, 583, 1, 0, 0, 0, 57, 600, 1, 0, 0, 0, 59, 616, 1, 0, 0, 0, 61, 622, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 628, 1, 0, 0, 0, 67, 630, 1, 0, 0, 0, 69, 633, 1, 0, 0, 0, 71, 635, 1, 0, 0, 0, 73, 644, 1, 0, 0, 0, 75, 646, 1, 0, 0, 0, 77, 651, 1, 0, 0, 0, 79, 653, 1, 0, 0, 0, 81, 658, 1, 0, 0, 0, 83, 689, 1, 0, 0, 0, 85, 692, 1, 0, 0, 0, 87, 738, 1, 0, 0, 0, 89, 740, 1, 0, 0, 0, 91, 743, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 753, 1, 0, 0, 0, 99, 756, 1, 0, 0, 0, 101, 758, 1, 0, 0, 0, 103, 763, 1, 0, 0, 0, 105, 765, 1, 0, 0, 0, 107, 771, 1, 0, 0, 0, 109, 777, 1, 0, 0, 0, 111, 780, 1, 0, 0, 0, 113, 783, 1, 0, 0, 0, 115, 788, 1, 0, 0, 0, 
117, 793, 1, 0, 0, 0, 119, 795, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 804, 1, 0, 0, 0, 125, 810, 1, 0, 0, 0, 127, 813, 1, 0, 0, 0, 129, 815, 1, 0, 0, 0, 131, 821, 1, 0, 0, 0, 133, 823, 1, 0, 0, 0, 135, 828, 1, 0, 0, 0, 137, 831, 1, 0, 0, 0, 139, 834, 1, 0, 0, 0, 141, 837, 1, 0, 0, 0, 143, 839, 1, 0, 0, 0, 145, 842, 1, 0, 0, 0, 147, 844, 1, 0, 0, 0, 149, 847, 1, 0, 0, 0, 151, 849, 1, 0, 0, 0, 153, 851, 1, 0, 0, 0, 155, 853, 1, 0, 0, 0, 157, 855, 1, 0, 0, 0, 159, 857, 1, 0, 0, 0, 161, 863, 1, 0, 0, 0, 163, 884, 1, 0, 0, 0, 165, 886, 1, 0, 0, 0, 167, 891, 1, 0, 0, 0, 169, 912, 1, 0, 0, 0, 171, 914, 1, 0, 0, 0, 173, 922, 1, 0, 0, 0, 175, 924, 1, 0, 0, 0, 177, 928, 1, 0, 0, 0, 179, 932, 1, 0, 0, 0, 181, 936, 1, 0, 0, 0, 183, 941, 1, 0, 0, 0, 185, 946, 1, 0, 0, 0, 187, 950, 1, 0, 0, 0, 189, 954, 1, 0, 0, 0, 191, 958, 1, 0, 0, 0, 193, 963, 1, 0, 0, 0, 195, 967, 1, 0, 0, 0, 197, 971, 1, 0, 0, 0, 199, 975, 1, 0, 0, 0, 201, 979, 1, 0, 0, 0, 203, 983, 1, 0, 0, 0, 205, 995, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1002, 1, 0, 0, 0, 211, 1006, 1, 0, 0, 0, 213, 1010, 1, 0, 0, 0, 215, 1014, 1, 0, 0, 0, 217, 1018, 1, 0, 0, 0, 219, 1022, 1, 0, 0, 0, 221, 1027, 1, 0, 0, 0, 223, 1031, 1, 0, 0, 0, 225, 1035, 1, 0, 0, 0, 227, 1040, 1, 0, 0, 0, 229, 1049, 1, 0, 0, 0, 231, 1070, 1, 0, 0, 0, 233, 1074, 1, 0, 0, 0, 235, 1078, 1, 0, 0, 0, 237, 1082, 1, 0, 0, 0, 239, 1086, 1, 0, 0, 0, 241, 1090, 1, 0, 0, 0, 243, 1095, 1, 0, 0, 0, 245, 1099, 1, 0, 0, 0, 247, 1103, 1, 0, 0, 0, 249, 1107, 1, 0, 0, 0, 251, 1112, 1, 0, 0, 0, 253, 1117, 1, 0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1124, 1, 0, 0, 0, 259, 1128, 1, 0, 0, 0, 261, 1132, 1, 0, 0, 0, 263, 1136, 1, 0, 0, 0, 265, 1141, 1, 0, 0, 0, 267, 1146, 1, 0, 0, 0, 269, 1151, 1, 0, 0, 0, 271, 1158, 1, 0, 0, 0, 273, 1167, 1, 0, 0, 0, 275, 1174, 1, 0, 0, 0, 277, 1178, 1, 0, 0, 0, 279, 1182, 1, 0, 0, 0, 281, 1186, 1, 0, 0, 0, 283, 1190, 1, 0, 0, 0, 285, 1196, 1, 0, 0, 0, 287, 1200, 1, 0, 0, 0, 289, 1204, 1, 0, 0, 0, 291, 1208, 1, 0, 0, 0, 293, 1212, 1, 0, 0, 
0, 295, 1216, 1, 0, 0, 0, 297, 1220, 1, 0, 0, 0, 299, 1225, 1, 0, 0, 0, 301, 1230, 1, 0, 0, 0, 303, 1234, 1, 0, 0, 0, 305, 1238, 1, 0, 0, 0, 307, 1242, 1, 0, 0, 0, 309, 1247, 1, 0, 0, 0, 311, 1251, 1, 0, 0, 0, 313, 1256, 1, 0, 0, 0, 315, 1261, 1, 0, 0, 0, 317, 1265, 1, 0, 0, 0, 319, 1269, 1, 0, 0, 0, 321, 1273, 1, 0, 0, 0, 323, 1277, 1, 0, 0, 0, 325, 1281, 1, 0, 0, 0, 327, 1286, 1, 0, 0, 0, 329, 1291, 1, 0, 0, 0, 331, 1295, 1, 0, 0, 0, 333, 1299, 1, 0, 0, 0, 335, 1303, 1, 0, 0, 0, 337, 1308, 1, 0, 0, 0, 339, 1315, 1, 0, 0, 0, 341, 1319, 1, 0, 0, 0, 343, 1323, 1, 0, 0, 0, 345, 1327, 1, 0, 0, 0, 347, 1331, 1, 0, 0, 0, 349, 1336, 1, 0, 0, 0, 351, 1340, 1, 0, 0, 0, 353, 1344, 1, 0, 0, 0, 355, 1348, 1, 0, 0, 0, 357, 1353, 1, 0, 0, 0, 359, 1357, 1, 0, 0, 0, 361, 1361, 1, 0, 0, 0, 363, 1365, 1, 0, 0, 0, 365, 1369, 1, 0, 0, 0, 367, 1373, 1, 0, 0, 0, 369, 1379, 1, 0, 0, 0, 371, 1383, 1, 0, 0, 0, 373, 1387, 1, 0, 0, 0, 375, 1391, 1, 0, 0, 0, 377, 1395, 1, 0, 0, 0, 379, 1399, 1, 0, 0, 0, 381, 1403, 1, 0, 0, 0, 383, 1408, 1, 0, 0, 0, 385, 1414, 1, 0, 0, 0, 387, 1420, 1, 0, 0, 0, 389, 1424, 1, 0, 0, 0, 391, 1428, 1, 0, 0, 0, 393, 1432, 1, 0, 0, 0, 395, 1438, 1, 0, 0, 0, 397, 1444, 1, 0, 0, 0, 399, 1448, 1, 0, 0, 0, 401, 1452, 1, 0, 0, 0, 403, 1456, 1, 0, 0, 0, 405, 1462, 1, 0, 0, 0, 407, 1468, 1, 0, 0, 0, 409, 1474, 1, 0, 0, 0, 411, 412, 7, 0, 0, 0, 412, 413, 7, 1, 0, 0, 413, 414, 7, 2, 0, 0, 414, 415, 7, 2, 0, 0, 415, 416, 7, 3, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 5, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 0, 0, 0, 420, 16, 1, 0, 0, 0, 421, 422, 7, 0, 0, 0, 422, 423, 7, 6, 0, 0, 423, 424, 7, 7, 0, 0, 424, 425, 7, 8, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 1, 1, 0, 427, 18, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 9, 0, 0, 430, 431, 7, 6, 0, 0, 431, 432, 7, 1, 0, 0, 432, 433, 7, 4, 0, 0, 433, 434, 7, 10, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 2, 2, 0, 436, 20, 1, 0, 0, 0, 437, 438, 7, 3, 0, 0, 438, 439, 7, 11, 0, 0, 439, 440, 7, 12, 0, 0, 440, 441, 7, 13, 0, 0, 
441, 442, 1, 0, 0, 0, 442, 443, 6, 3, 0, 0, 443, 22, 1, 0, 0, 0, 444, 445, 7, 3, 0, 0, 445, 446, 7, 14, 0, 0, 446, 447, 7, 8, 0, 0, 447, 448, 7, 13, 0, 0, 448, 449, 7, 12, 0, 0, 449, 450, 7, 1, 0, 0, 450, 451, 7, 9, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 4, 3, 0, 453, 24, 1, 0, 0, 0, 454, 455, 7, 15, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 16, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 5, 4, 0, 460, 26, 1, 0, 0, 0, 461, 462, 7, 17, 0, 0, 462, 463, 7, 6, 0, 0, 463, 464, 7, 7, 0, 0, 464, 465, 7, 18, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 6, 0, 0, 467, 28, 1, 0, 0, 0, 468, 469, 7, 18, 0, 0, 469, 470, 7, 3, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 8, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 1, 0, 474, 30, 1, 0, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 7, 1, 0, 0, 477, 478, 7, 16, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 5, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 8, 0, 0, 482, 32, 1, 0, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 11, 0, 0, 485, 486, 5, 95, 0, 0, 486, 487, 7, 3, 0, 0, 487, 488, 7, 14, 0, 0, 488, 489, 7, 8, 0, 0, 489, 490, 7, 12, 0, 0, 490, 491, 7, 9, 0, 0, 491, 492, 7, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 9, 5, 0, 494, 34, 1, 0, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 3, 0, 0, 497, 498, 7, 9, 0, 0, 498, 499, 7, 12, 0, 0, 499, 500, 7, 16, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 10, 6, 0, 503, 36, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 506, 7, 7, 0, 0, 506, 507, 7, 19, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 11, 0, 0, 509, 38, 1, 0, 0, 0, 510, 511, 7, 2, 0, 0, 511, 512, 7, 10, 0, 0, 512, 513, 7, 7, 0, 0, 513, 514, 7, 19, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 12, 7, 0, 516, 40, 1, 0, 0, 0, 517, 518, 7, 2, 0, 0, 518, 519, 7, 7, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 5, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 13, 0, 0, 523, 42, 1, 0, 0, 0, 524, 525, 7, 2, 0, 0, 525, 526, 7, 5, 0, 0, 526, 527, 7, 12, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 14, 0, 0, 531, 
44, 1, 0, 0, 0, 532, 533, 7, 19, 0, 0, 533, 534, 7, 10, 0, 0, 534, 535, 7, 3, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 15, 0, 0, 539, 46, 1, 0, 0, 0, 540, 541, 4, 16, 0, 0, 541, 542, 7, 1, 0, 0, 542, 543, 7, 9, 0, 0, 543, 544, 7, 13, 0, 0, 544, 545, 7, 1, 0, 0, 545, 546, 7, 9, 0, 0, 546, 547, 7, 3, 0, 0, 547, 548, 7, 2, 0, 0, 548, 549, 7, 5, 0, 0, 549, 550, 7, 12, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 16, 0, 0, 554, 48, 1, 0, 0, 0, 555, 556, 4, 17, 1, 0, 556, 557, 7, 13, 0, 0, 557, 558, 7, 7, 0, 0, 558, 559, 7, 7, 0, 0, 559, 560, 7, 18, 0, 0, 560, 561, 7, 20, 0, 0, 561, 562, 7, 8, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 17, 8, 0, 564, 50, 1, 0, 0, 0, 565, 566, 4, 18, 2, 0, 566, 567, 7, 16, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 5, 0, 0, 569, 570, 7, 6, 0, 0, 570, 571, 7, 1, 0, 0, 571, 572, 7, 4, 0, 0, 572, 573, 7, 2, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 18, 9, 0, 575, 52, 1, 0, 0, 0, 576, 578, 8, 21, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 6, 19, 0, 0, 582, 54, 1, 0, 0, 0, 583, 584, 5, 47, 0, 0, 584, 585, 5, 47, 0, 0, 585, 589, 1, 0, 0, 0, 586, 588, 8, 22, 0, 0, 587, 586, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 594, 5, 13, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 597, 5, 10, 0, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 20, 10, 0, 599, 56, 1, 0, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 5, 42, 0, 0, 602, 607, 1, 0, 0, 0, 603, 606, 3, 57, 21, 0, 604, 606, 9, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 604, 1, 0, 0, 0, 606, 609, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 608, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 611, 5, 42, 0, 0, 611, 612, 5, 47, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 21, 10, 0, 614, 58, 1, 0, 0, 0, 
615, 617, 7, 23, 0, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 621, 6, 22, 10, 0, 621, 60, 1, 0, 0, 0, 622, 623, 5, 124, 0, 0, 623, 624, 1, 0, 0, 0, 624, 625, 6, 23, 11, 0, 625, 62, 1, 0, 0, 0, 626, 627, 7, 24, 0, 0, 627, 64, 1, 0, 0, 0, 628, 629, 7, 25, 0, 0, 629, 66, 1, 0, 0, 0, 630, 631, 5, 92, 0, 0, 631, 632, 7, 26, 0, 0, 632, 68, 1, 0, 0, 0, 633, 634, 8, 27, 0, 0, 634, 70, 1, 0, 0, 0, 635, 637, 7, 3, 0, 0, 636, 638, 7, 28, 0, 0, 637, 636, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 640, 1, 0, 0, 0, 639, 641, 3, 63, 24, 0, 640, 639, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 72, 1, 0, 0, 0, 644, 645, 5, 64, 0, 0, 645, 74, 1, 0, 0, 0, 646, 647, 5, 96, 0, 0, 647, 76, 1, 0, 0, 0, 648, 652, 8, 29, 0, 0, 649, 650, 5, 96, 0, 0, 650, 652, 5, 96, 0, 0, 651, 648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 78, 1, 0, 0, 0, 653, 654, 5, 95, 0, 0, 654, 80, 1, 0, 0, 0, 655, 659, 3, 65, 25, 0, 656, 659, 3, 63, 24, 0, 657, 659, 3, 79, 32, 0, 658, 655, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 657, 1, 0, 0, 0, 659, 82, 1, 0, 0, 0, 660, 665, 5, 34, 0, 0, 661, 664, 3, 67, 26, 0, 662, 664, 3, 69, 27, 0, 663, 661, 1, 0, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 690, 5, 34, 0, 0, 669, 670, 5, 34, 0, 0, 670, 671, 5, 34, 0, 0, 671, 672, 5, 34, 0, 0, 672, 676, 1, 0, 0, 0, 673, 675, 8, 22, 0, 0, 674, 673, 1, 0, 0, 0, 675, 678, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 679, 680, 5, 34, 0, 0, 680, 681, 5, 34, 0, 0, 681, 682, 5, 34, 0, 0, 682, 684, 1, 0, 0, 0, 683, 685, 5, 34, 0, 0, 684, 683, 1, 0, 0, 0, 684, 685, 1, 0, 0, 0, 685, 687, 1, 0, 0, 0, 686, 688, 5, 34, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 660, 1, 0, 0, 0, 689, 669, 1, 0, 0, 0, 690, 84, 1, 0, 0, 0, 691, 693, 3, 63, 24, 0, 692, 691, 
1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 86, 1, 0, 0, 0, 696, 698, 3, 63, 24, 0, 697, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 705, 3, 103, 44, 0, 702, 704, 3, 63, 24, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 739, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 710, 3, 103, 44, 0, 709, 711, 3, 63, 24, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 739, 1, 0, 0, 0, 714, 716, 3, 63, 24, 0, 715, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 726, 1, 0, 0, 0, 719, 723, 3, 103, 44, 0, 720, 722, 3, 63, 24, 0, 721, 720, 1, 0, 0, 0, 722, 725, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 727, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 726, 719, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 3, 71, 28, 0, 729, 739, 1, 0, 0, 0, 730, 732, 3, 103, 44, 0, 731, 733, 3, 63, 24, 0, 732, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 3, 71, 28, 0, 737, 739, 1, 0, 0, 0, 738, 697, 1, 0, 0, 0, 738, 708, 1, 0, 0, 0, 738, 715, 1, 0, 0, 0, 738, 730, 1, 0, 0, 0, 739, 88, 1, 0, 0, 0, 740, 741, 7, 30, 0, 0, 741, 742, 7, 31, 0, 0, 742, 90, 1, 0, 0, 0, 743, 744, 7, 12, 0, 0, 744, 745, 7, 9, 0, 0, 745, 746, 7, 0, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 2, 0, 0, 749, 750, 7, 4, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 96, 1, 0, 0, 0, 753, 754, 5, 58, 0, 0, 754, 755, 5, 58, 0, 0, 755, 98, 1, 0, 0, 0, 756, 757, 5, 44, 0, 0, 757, 100, 1, 0, 0, 0, 758, 759, 7, 0, 0, 0, 759, 760, 7, 3, 0, 0, 760, 761, 7, 2, 0, 0, 761, 762, 7, 4, 0, 0, 762, 102, 1, 0, 0, 0, 763, 764, 5, 46, 0, 0, 764, 104, 1, 0, 0, 0, 765, 766, 7, 15, 0, 0, 766, 767, 7, 12, 0, 0, 767, 768, 7, 13, 0, 0, 768, 769, 7, 2, 0, 0, 769, 770, 7, 3, 0, 0, 770, 106, 1, 0, 0, 
0, 771, 772, 7, 15, 0, 0, 772, 773, 7, 1, 0, 0, 773, 774, 7, 6, 0, 0, 774, 775, 7, 2, 0, 0, 775, 776, 7, 5, 0, 0, 776, 108, 1, 0, 0, 0, 777, 778, 7, 1, 0, 0, 778, 779, 7, 9, 0, 0, 779, 110, 1, 0, 0, 0, 780, 781, 7, 1, 0, 0, 781, 782, 7, 2, 0, 0, 782, 112, 1, 0, 0, 0, 783, 784, 7, 13, 0, 0, 784, 785, 7, 12, 0, 0, 785, 786, 7, 2, 0, 0, 786, 787, 7, 5, 0, 0, 787, 114, 1, 0, 0, 0, 788, 789, 7, 13, 0, 0, 789, 790, 7, 1, 0, 0, 790, 791, 7, 18, 0, 0, 791, 792, 7, 3, 0, 0, 792, 116, 1, 0, 0, 0, 793, 794, 5, 40, 0, 0, 794, 118, 1, 0, 0, 0, 795, 796, 7, 9, 0, 0, 796, 797, 7, 7, 0, 0, 797, 798, 7, 5, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 20, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 13, 0, 0, 803, 122, 1, 0, 0, 0, 804, 805, 7, 9, 0, 0, 805, 806, 7, 20, 0, 0, 806, 807, 7, 13, 0, 0, 807, 808, 7, 13, 0, 0, 808, 809, 7, 2, 0, 0, 809, 124, 1, 0, 0, 0, 810, 811, 7, 7, 0, 0, 811, 812, 7, 6, 0, 0, 812, 126, 1, 0, 0, 0, 813, 814, 5, 63, 0, 0, 814, 128, 1, 0, 0, 0, 815, 816, 7, 6, 0, 0, 816, 817, 7, 13, 0, 0, 817, 818, 7, 1, 0, 0, 818, 819, 7, 18, 0, 0, 819, 820, 7, 3, 0, 0, 820, 130, 1, 0, 0, 0, 821, 822, 5, 41, 0, 0, 822, 132, 1, 0, 0, 0, 823, 824, 7, 5, 0, 0, 824, 825, 7, 6, 0, 0, 825, 826, 7, 20, 0, 0, 826, 827, 7, 3, 0, 0, 827, 134, 1, 0, 0, 0, 828, 829, 5, 61, 0, 0, 829, 830, 5, 61, 0, 0, 830, 136, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 833, 5, 126, 0, 0, 833, 138, 1, 0, 0, 0, 834, 835, 5, 33, 0, 0, 835, 836, 5, 61, 0, 0, 836, 140, 1, 0, 0, 0, 837, 838, 5, 60, 0, 0, 838, 142, 1, 0, 0, 0, 839, 840, 5, 60, 0, 0, 840, 841, 5, 61, 0, 0, 841, 144, 1, 0, 0, 0, 842, 843, 5, 62, 0, 0, 843, 146, 1, 0, 0, 0, 844, 845, 5, 62, 0, 0, 845, 846, 5, 61, 0, 0, 846, 148, 1, 0, 0, 0, 847, 848, 5, 43, 0, 0, 848, 150, 1, 0, 0, 0, 849, 850, 5, 45, 0, 0, 850, 152, 1, 0, 0, 0, 851, 852, 5, 42, 0, 0, 852, 154, 1, 0, 0, 0, 853, 854, 5, 47, 0, 0, 854, 156, 1, 0, 0, 0, 855, 856, 5, 37, 0, 0, 856, 158, 1, 0, 0, 0, 857, 858, 7, 16, 0, 0, 858, 859, 7, 12, 0, 0, 859, 860, 7, 5, 0, 0, 
860, 861, 7, 4, 0, 0, 861, 862, 7, 10, 0, 0, 862, 160, 1, 0, 0, 0, 863, 864, 3, 45, 15, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 73, 12, 0, 866, 162, 1, 0, 0, 0, 867, 870, 3, 127, 56, 0, 868, 871, 3, 65, 25, 0, 869, 871, 3, 79, 32, 0, 870, 868, 1, 0, 0, 0, 870, 869, 1, 0, 0, 0, 871, 875, 1, 0, 0, 0, 872, 874, 3, 81, 33, 0, 873, 872, 1, 0, 0, 0, 874, 877, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 885, 1, 0, 0, 0, 877, 875, 1, 0, 0, 0, 878, 880, 3, 127, 56, 0, 879, 881, 3, 63, 24, 0, 880, 879, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 1, 0, 0, 0, 884, 867, 1, 0, 0, 0, 884, 878, 1, 0, 0, 0, 885, 164, 1, 0, 0, 0, 886, 887, 5, 91, 0, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 75, 0, 0, 889, 890, 6, 75, 0, 0, 890, 166, 1, 0, 0, 0, 891, 892, 5, 93, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 76, 11, 0, 894, 895, 6, 76, 11, 0, 895, 168, 1, 0, 0, 0, 896, 900, 3, 65, 25, 0, 897, 899, 3, 81, 33, 0, 898, 897, 1, 0, 0, 0, 899, 902, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 913, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 903, 906, 3, 79, 32, 0, 904, 906, 3, 73, 29, 0, 905, 903, 1, 0, 0, 0, 905, 904, 1, 0, 0, 0, 906, 908, 1, 0, 0, 0, 907, 909, 3, 81, 33, 0, 908, 907, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 913, 1, 0, 0, 0, 912, 896, 1, 0, 0, 0, 912, 905, 1, 0, 0, 0, 913, 170, 1, 0, 0, 0, 914, 916, 3, 75, 30, 0, 915, 917, 3, 77, 31, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0, 920, 921, 3, 75, 30, 0, 921, 172, 1, 0, 0, 0, 922, 923, 3, 171, 78, 0, 923, 174, 1, 0, 0, 0, 924, 925, 3, 55, 20, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 80, 10, 0, 927, 176, 1, 0, 0, 0, 928, 929, 3, 57, 21, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 81, 10, 0, 931, 178, 1, 0, 0, 0, 932, 933, 3, 59, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 82, 10, 0, 935, 180, 1, 0, 0, 0, 936, 937, 3, 165, 75, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 83, 13, 0, 
939, 940, 6, 83, 14, 0, 940, 182, 1, 0, 0, 0, 941, 942, 3, 61, 23, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 84, 15, 0, 944, 945, 6, 84, 11, 0, 945, 184, 1, 0, 0, 0, 946, 947, 3, 59, 22, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 85, 10, 0, 949, 186, 1, 0, 0, 0, 950, 951, 3, 55, 20, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 86, 10, 0, 953, 188, 1, 0, 0, 0, 954, 955, 3, 57, 21, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 87, 10, 0, 957, 190, 1, 0, 0, 0, 958, 959, 3, 61, 23, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 88, 15, 0, 961, 962, 6, 88, 11, 0, 962, 192, 1, 0, 0, 0, 963, 964, 3, 165, 75, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 89, 13, 0, 966, 194, 1, 0, 0, 0, 967, 968, 3, 167, 76, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 90, 16, 0, 970, 196, 1, 0, 0, 0, 971, 972, 3, 337, 161, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 91, 17, 0, 974, 198, 1, 0, 0, 0, 975, 976, 3, 99, 42, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 92, 18, 0, 978, 200, 1, 0, 0, 0, 979, 980, 3, 95, 40, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 93, 19, 0, 982, 202, 1, 0, 0, 0, 983, 984, 7, 16, 0, 0, 984, 985, 7, 3, 0, 0, 985, 986, 7, 5, 0, 0, 986, 987, 7, 12, 0, 0, 987, 988, 7, 0, 0, 0, 988, 989, 7, 12, 0, 0, 989, 990, 7, 5, 0, 0, 990, 991, 7, 12, 0, 0, 991, 204, 1, 0, 0, 0, 992, 996, 8, 32, 0, 0, 993, 994, 5, 47, 0, 0, 994, 996, 8, 33, 0, 0, 995, 992, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 996, 206, 1, 0, 0, 0, 997, 999, 3, 205, 95, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 208, 1, 0, 0, 0, 1002, 1003, 3, 207, 96, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 97, 20, 0, 1005, 210, 1, 0, 0, 0, 1006, 1007, 3, 83, 34, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 98, 21, 0, 1009, 212, 1, 0, 0, 0, 1010, 1011, 3, 55, 20, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 99, 10, 0, 1013, 214, 1, 0, 0, 0, 1014, 1015, 3, 57, 21, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 100, 10, 0, 1017, 216, 1, 0, 0, 0, 1018, 1019, 3, 59, 22, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 101, 10, 0, 1021, 218, 1, 0, 0, 0, 1022, 
1023, 3, 61, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 102, 15, 0, 1025, 1026, 6, 102, 11, 0, 1026, 220, 1, 0, 0, 0, 1027, 1028, 3, 103, 44, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1030, 6, 103, 22, 0, 1030, 222, 1, 0, 0, 0, 1031, 1032, 3, 99, 42, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 6, 104, 18, 0, 1034, 224, 1, 0, 0, 0, 1035, 1036, 4, 105, 3, 0, 1036, 1037, 3, 127, 56, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 105, 23, 0, 1039, 226, 1, 0, 0, 0, 1040, 1041, 4, 106, 4, 0, 1041, 1042, 3, 163, 74, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 106, 24, 0, 1044, 228, 1, 0, 0, 0, 1045, 1050, 3, 65, 25, 0, 1046, 1050, 3, 63, 24, 0, 1047, 1050, 3, 79, 32, 0, 1048, 1050, 3, 153, 69, 0, 1049, 1045, 1, 0, 0, 0, 1049, 1046, 1, 0, 0, 0, 1049, 1047, 1, 0, 0, 0, 1049, 1048, 1, 0, 0, 0, 1050, 230, 1, 0, 0, 0, 1051, 1054, 3, 65, 25, 0, 1052, 1054, 3, 153, 69, 0, 1053, 1051, 1, 0, 0, 0, 1053, 1052, 1, 0, 0, 0, 1054, 1058, 1, 0, 0, 0, 1055, 1057, 3, 229, 107, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1060, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1071, 1, 0, 0, 0, 1060, 1058, 1, 0, 0, 0, 1061, 1064, 3, 79, 32, 0, 1062, 1064, 3, 73, 29, 0, 1063, 1061, 1, 0, 0, 0, 1063, 1062, 1, 0, 0, 0, 1064, 1066, 1, 0, 0, 0, 1065, 1067, 3, 229, 107, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1071, 1, 0, 0, 0, 1070, 1053, 1, 0, 0, 0, 1070, 1063, 1, 0, 0, 0, 1071, 232, 1, 0, 0, 0, 1072, 1075, 3, 231, 108, 0, 1073, 1075, 3, 171, 78, 0, 1074, 1072, 1, 0, 0, 0, 1074, 1073, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1074, 1, 0, 0, 0, 1076, 1077, 1, 0, 0, 0, 1077, 234, 1, 0, 0, 0, 1078, 1079, 3, 55, 20, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 110, 10, 0, 1081, 236, 1, 0, 0, 0, 1082, 1083, 3, 57, 21, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 111, 10, 0, 1085, 238, 1, 0, 0, 0, 1086, 1087, 3, 59, 22, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 112, 10, 0, 1089, 240, 1, 0, 0, 0, 1090, 1091, 3, 61, 23, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 
6, 113, 15, 0, 1093, 1094, 6, 113, 11, 0, 1094, 242, 1, 0, 0, 0, 1095, 1096, 3, 95, 40, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 114, 19, 0, 1098, 244, 1, 0, 0, 0, 1099, 1100, 3, 99, 42, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 115, 18, 0, 1102, 246, 1, 0, 0, 0, 1103, 1104, 3, 103, 44, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 116, 22, 0, 1106, 248, 1, 0, 0, 0, 1107, 1108, 4, 117, 5, 0, 1108, 1109, 3, 127, 56, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 117, 23, 0, 1111, 250, 1, 0, 0, 0, 1112, 1113, 4, 118, 6, 0, 1113, 1114, 3, 163, 74, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 118, 24, 0, 1116, 252, 1, 0, 0, 0, 1117, 1118, 7, 12, 0, 0, 1118, 1119, 7, 2, 0, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 3, 233, 109, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 120, 25, 0, 1123, 256, 1, 0, 0, 0, 1124, 1125, 3, 55, 20, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 121, 10, 0, 1127, 258, 1, 0, 0, 0, 1128, 1129, 3, 57, 21, 0, 1129, 1130, 1, 0, 0, 0, 1130, 1131, 6, 122, 10, 0, 1131, 260, 1, 0, 0, 0, 1132, 1133, 3, 59, 22, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1135, 6, 123, 10, 0, 1135, 262, 1, 0, 0, 0, 1136, 1137, 3, 61, 23, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 124, 15, 0, 1139, 1140, 6, 124, 11, 0, 1140, 264, 1, 0, 0, 0, 1141, 1142, 3, 165, 75, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1144, 6, 125, 13, 0, 1144, 1145, 6, 125, 26, 0, 1145, 266, 1, 0, 0, 0, 1146, 1147, 7, 7, 0, 0, 1147, 1148, 7, 9, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 1150, 6, 126, 27, 0, 1150, 268, 1, 0, 0, 0, 1151, 1152, 7, 19, 0, 0, 1152, 1153, 7, 1, 0, 0, 1153, 1154, 7, 5, 0, 0, 1154, 1155, 7, 10, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 127, 27, 0, 1157, 270, 1, 0, 0, 0, 1158, 1159, 8, 34, 0, 0, 1159, 272, 1, 0, 0, 0, 1160, 1162, 3, 271, 128, 0, 1161, 1160, 1, 0, 0, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1161, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 3, 337, 161, 0, 1166, 1168, 1, 0, 0, 0, 1167, 1161, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1170, 1, 0, 0, 0, 1169, 1171, 3, 271, 128, 0, 1170, 
1169, 1, 0, 0, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1170, 1, 0, 0, 0, 1172, 1173, 1, 0, 0, 0, 1173, 274, 1, 0, 0, 0, 1174, 1175, 3, 273, 129, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 130, 28, 0, 1177, 276, 1, 0, 0, 0, 1178, 1179, 3, 55, 20, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 131, 10, 0, 1181, 278, 1, 0, 0, 0, 1182, 1183, 3, 57, 21, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 132, 10, 0, 1185, 280, 1, 0, 0, 0, 1186, 1187, 3, 59, 22, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 133, 10, 0, 1189, 282, 1, 0, 0, 0, 1190, 1191, 3, 61, 23, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 134, 15, 0, 1193, 1194, 6, 134, 11, 0, 1194, 1195, 6, 134, 11, 0, 1195, 284, 1, 0, 0, 0, 1196, 1197, 3, 95, 40, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 135, 19, 0, 1199, 286, 1, 0, 0, 0, 1200, 1201, 3, 99, 42, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 136, 18, 0, 1203, 288, 1, 0, 0, 0, 1204, 1205, 3, 103, 44, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 137, 22, 0, 1207, 290, 1, 0, 0, 0, 1208, 1209, 3, 269, 127, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 138, 29, 0, 1211, 292, 1, 0, 0, 0, 1212, 1213, 3, 233, 109, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 139, 25, 0, 1215, 294, 1, 0, 0, 0, 1216, 1217, 3, 173, 79, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 140, 30, 0, 1219, 296, 1, 0, 0, 0, 1220, 1221, 4, 141, 7, 0, 1221, 1222, 3, 127, 56, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 141, 23, 0, 1224, 298, 1, 0, 0, 0, 1225, 1226, 4, 142, 8, 0, 1226, 1227, 3, 163, 74, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 142, 24, 0, 1229, 300, 1, 0, 0, 0, 1230, 1231, 3, 55, 20, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 143, 10, 0, 1233, 302, 1, 0, 0, 0, 1234, 1235, 3, 57, 21, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 144, 10, 0, 1237, 304, 1, 0, 0, 0, 1238, 1239, 3, 59, 22, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 145, 10, 0, 1241, 306, 1, 0, 0, 0, 1242, 1243, 3, 61, 23, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 6, 146, 15, 0, 1245, 1246, 6, 146, 11, 0, 1246, 308, 1, 0, 0, 0, 1247, 1248, 3, 103, 44, 0, 1248, 1249, 1, 
0, 0, 0, 1249, 1250, 6, 147, 22, 0, 1250, 310, 1, 0, 0, 0, 1251, 1252, 4, 148, 9, 0, 1252, 1253, 3, 127, 56, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 148, 23, 0, 1255, 312, 1, 0, 0, 0, 1256, 1257, 4, 149, 10, 0, 1257, 1258, 3, 163, 74, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 149, 24, 0, 1260, 314, 1, 0, 0, 0, 1261, 1262, 3, 173, 79, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 150, 30, 0, 1264, 316, 1, 0, 0, 0, 1265, 1266, 3, 169, 77, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 151, 31, 0, 1268, 318, 1, 0, 0, 0, 1269, 1270, 3, 55, 20, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 152, 10, 0, 1272, 320, 1, 0, 0, 0, 1273, 1274, 3, 57, 21, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 153, 10, 0, 1276, 322, 1, 0, 0, 0, 1277, 1278, 3, 59, 22, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 154, 10, 0, 1280, 324, 1, 0, 0, 0, 1281, 1282, 3, 61, 23, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 155, 15, 0, 1284, 1285, 6, 155, 11, 0, 1285, 326, 1, 0, 0, 0, 1286, 1287, 7, 1, 0, 0, 1287, 1288, 7, 9, 0, 0, 1288, 1289, 7, 15, 0, 0, 1289, 1290, 7, 7, 0, 0, 1290, 328, 1, 0, 0, 0, 1291, 1292, 3, 55, 20, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 157, 10, 0, 1294, 330, 1, 0, 0, 0, 1295, 1296, 3, 57, 21, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 158, 10, 0, 1298, 332, 1, 0, 0, 0, 1299, 1300, 3, 59, 22, 0, 1300, 1301, 1, 0, 0, 0, 1301, 1302, 6, 159, 10, 0, 1302, 334, 1, 0, 0, 0, 1303, 1304, 3, 167, 76, 0, 1304, 1305, 1, 0, 0, 0, 1305, 1306, 6, 160, 16, 0, 1306, 1307, 6, 160, 11, 0, 1307, 336, 1, 0, 0, 0, 1308, 1309, 5, 58, 0, 0, 1309, 338, 1, 0, 0, 0, 1310, 1316, 3, 73, 29, 0, 1311, 1316, 3, 63, 24, 0, 1312, 1316, 3, 103, 44, 0, 1313, 1316, 3, 65, 25, 0, 1314, 1316, 3, 79, 32, 0, 1315, 1310, 1, 0, 0, 0, 1315, 1311, 1, 0, 0, 0, 1315, 1312, 1, 0, 0, 0, 1315, 1313, 1, 0, 0, 0, 1315, 1314, 1, 0, 0, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1315, 1, 0, 0, 0, 1317, 1318, 1, 0, 0, 0, 1318, 340, 1, 0, 0, 0, 1319, 1320, 3, 55, 20, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 163, 10, 0, 1322, 342, 1, 0, 0, 0, 1323, 1324, 3, 
57, 21, 0, 1324, 1325, 1, 0, 0, 0, 1325, 1326, 6, 164, 10, 0, 1326, 344, 1, 0, 0, 0, 1327, 1328, 3, 59, 22, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 165, 10, 0, 1330, 346, 1, 0, 0, 0, 1331, 1332, 3, 61, 23, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 166, 15, 0, 1334, 1335, 6, 166, 11, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 337, 161, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 17, 0, 1339, 350, 1, 0, 0, 0, 1340, 1341, 3, 99, 42, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 168, 18, 0, 1343, 352, 1, 0, 0, 0, 1344, 1345, 3, 103, 44, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 169, 22, 0, 1347, 354, 1, 0, 0, 0, 1348, 1349, 3, 267, 126, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 170, 32, 0, 1351, 1352, 6, 170, 33, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 207, 96, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 20, 0, 1356, 358, 1, 0, 0, 0, 1357, 1358, 3, 83, 34, 0, 1358, 1359, 1, 0, 0, 0, 1359, 1360, 6, 172, 21, 0, 1360, 360, 1, 0, 0, 0, 1361, 1362, 3, 55, 20, 0, 1362, 1363, 1, 0, 0, 0, 1363, 1364, 6, 173, 10, 0, 1364, 362, 1, 0, 0, 0, 1365, 1366, 3, 57, 21, 0, 1366, 1367, 1, 0, 0, 0, 1367, 1368, 6, 174, 10, 0, 1368, 364, 1, 0, 0, 0, 1369, 1370, 3, 59, 22, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 175, 10, 0, 1372, 366, 1, 0, 0, 0, 1373, 1374, 3, 61, 23, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 176, 15, 0, 1376, 1377, 6, 176, 11, 0, 1377, 1378, 6, 176, 11, 0, 1378, 368, 1, 0, 0, 0, 1379, 1380, 3, 99, 42, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 177, 18, 0, 1382, 370, 1, 0, 0, 0, 1383, 1384, 3, 103, 44, 0, 1384, 1385, 1, 0, 0, 0, 1385, 1386, 6, 178, 22, 0, 1386, 372, 1, 0, 0, 0, 1387, 1388, 3, 233, 109, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 179, 25, 0, 1390, 374, 1, 0, 0, 0, 1391, 1392, 3, 55, 20, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 180, 10, 0, 1394, 376, 1, 0, 0, 0, 1395, 1396, 3, 57, 21, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 181, 10, 0, 1398, 378, 1, 0, 0, 0, 1399, 1400, 3, 59, 22, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 182, 10, 0, 1402, 380, 1, 0, 0, 
0, 1403, 1404, 3, 61, 23, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 183, 15, 0, 1406, 1407, 6, 183, 11, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 207, 96, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 20, 0, 1411, 1412, 6, 184, 11, 0, 1412, 1413, 6, 184, 34, 0, 1413, 384, 1, 0, 0, 0, 1414, 1415, 3, 83, 34, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 185, 21, 0, 1417, 1418, 6, 185, 11, 0, 1418, 1419, 6, 185, 34, 0, 1419, 386, 1, 0, 0, 0, 1420, 1421, 3, 55, 20, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 186, 10, 0, 1423, 388, 1, 0, 0, 0, 1424, 1425, 3, 57, 21, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 187, 10, 0, 1427, 390, 1, 0, 0, 0, 1428, 1429, 3, 59, 22, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 188, 10, 0, 1431, 392, 1, 0, 0, 0, 1432, 1433, 3, 337, 161, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 189, 17, 0, 1435, 1436, 6, 189, 11, 0, 1436, 1437, 6, 189, 9, 0, 1437, 394, 1, 0, 0, 0, 1438, 1439, 3, 99, 42, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 190, 18, 0, 1441, 1442, 6, 190, 11, 0, 1442, 1443, 6, 190, 9, 0, 1443, 396, 1, 0, 0, 0, 1444, 1445, 3, 55, 20, 0, 1445, 1446, 1, 0, 0, 0, 1446, 1447, 6, 191, 10, 0, 1447, 398, 1, 0, 0, 0, 1448, 1449, 3, 57, 21, 0, 1449, 1450, 1, 0, 0, 0, 1450, 1451, 6, 192, 10, 0, 1451, 400, 1, 0, 0, 0, 1452, 1453, 3, 59, 22, 0, 1453, 1454, 1, 0, 0, 0, 1454, 1455, 6, 193, 10, 0, 1455, 402, 1, 0, 0, 0, 1456, 1457, 3, 173, 79, 0, 1457, 1458, 1, 0, 0, 0, 1458, 1459, 6, 194, 11, 0, 1459, 1460, 6, 194, 0, 0, 1460, 1461, 6, 194, 30, 0, 1461, 404, 1, 0, 0, 0, 1462, 1463, 3, 169, 77, 0, 1463, 1464, 1, 0, 0, 0, 1464, 1465, 6, 195, 11, 0, 1465, 1466, 6, 195, 0, 0, 1466, 1467, 6, 195, 31, 0, 1467, 406, 1, 0, 0, 0, 1468, 1469, 3, 89, 37, 0, 1469, 1470, 1, 0, 0, 0, 1470, 1471, 6, 196, 11, 0, 1471, 1472, 6, 196, 0, 0, 1472, 1473, 6, 196, 35, 0, 1473, 408, 1, 0, 0, 0, 1474, 1475, 3, 61, 23, 0, 1475, 1476, 1, 0, 0, 0, 1476, 1477, 6, 197, 15, 0, 1477, 1478, 6, 197, 11, 0, 1478, 410, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 579, 589, 
593, 596, 605, 607, 618, 637, 642, 651, 658, 663, 665, 676, 684, 687, 689, 694, 699, 705, 712, 717, 723, 726, 734, 738, 870, 875, 882, 884, 900, 905, 910, 912, 918, 995, 1000, 1049, 1053, 1058, 1063, 1068, 1070, 1074, 1076, 1163, 1167, 1172, 1315, 1317, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 24, 0, 7, 66, 0, 7, 104, 0, 7, 33, 0, 7, 31, 0, 7, 76, 0, 7, 25, 0, 7, 35, 0, 7, 47, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 28, 0] \ No newline at end of file +[4, 0, 119, 1484, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 
2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 
1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 580, 8, 19, 11, 19, 12, 19, 581, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 590, 8, 20, 10, 20, 12, 20, 593, 9, 20, 1, 20, 3, 20, 596, 8, 20, 1, 20, 3, 20, 599, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 608, 8, 21, 10, 21, 12, 21, 611, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 619, 8, 22, 11, 22, 12, 22, 620, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 3, 29, 642, 8, 29, 1, 29, 4, 29, 645, 8, 29, 11, 29, 12, 29, 646, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 3, 32, 656, 8, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 663, 8, 34, 1, 35, 1, 35, 1, 35, 5, 35, 668, 8, 35, 10, 35, 12, 35, 671, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 679, 8, 35, 10, 35, 12, 35, 682, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 689, 8, 35, 1, 35, 3, 35, 692, 8, 35, 3, 35, 694, 8, 35, 1, 36, 4, 36, 697, 8, 36, 11, 36, 12, 36, 698, 1, 37, 4, 37, 702, 8, 37, 11, 37, 12, 37, 703, 1, 37, 1, 37, 5, 37, 708, 8, 37, 10, 37, 12, 37, 711, 9, 37, 1, 37, 1, 37, 4, 37, 715, 8, 37, 11, 37, 12, 37, 716, 1, 37, 4, 37, 720, 8, 37, 11, 37, 12, 37, 721, 1, 
37, 1, 37, 5, 37, 726, 8, 37, 10, 37, 12, 37, 729, 9, 37, 3, 37, 731, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 737, 8, 37, 11, 37, 12, 37, 738, 1, 37, 1, 37, 3, 37, 743, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 3, 75, 874, 8, 75, 1, 75, 5, 75, 877, 8, 75, 10, 75, 12, 75, 880, 9, 75, 1, 75, 1, 75, 4, 75, 884, 8, 75, 11, 75, 12, 75, 885, 3, 75, 888, 8, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 902, 8, 78, 10, 78, 12, 78, 905, 9, 78, 1, 78, 1, 78, 3, 78, 909, 8, 78, 1, 78, 4, 78, 912, 8, 78, 11, 78, 12, 78, 913, 3, 78, 916, 8, 78, 1, 79, 1, 79, 4, 79, 920, 8, 79, 11, 79, 12, 79, 921, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 
95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 3, 96, 999, 8, 96, 1, 97, 4, 97, 1002, 8, 97, 11, 97, 12, 97, 1003, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 3, 108, 1053, 8, 108, 1, 109, 1, 109, 3, 109, 1057, 8, 109, 1, 109, 5, 109, 1060, 8, 109, 10, 109, 12, 109, 1063, 9, 109, 1, 109, 1, 109, 3, 109, 1067, 8, 109, 1, 109, 4, 109, 1070, 8, 109, 11, 109, 12, 109, 1071, 3, 109, 1074, 8, 109, 1, 110, 1, 110, 4, 110, 1078, 8, 110, 11, 110, 12, 110, 1079, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 130, 4, 130, 1165, 8, 130, 11, 130, 12, 130, 1166, 1, 130, 1, 130, 3, 130, 1171, 8, 130, 1, 130, 4, 130, 1174, 8, 130, 11, 130, 12, 130, 1175, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 
1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 4, 163, 1321, 8, 163, 11, 163, 12, 163, 1322, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 
1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 2, 609, 680, 0, 199, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 0, 165, 64, 167, 65, 169, 66, 171, 67, 173, 0, 175, 68, 177, 69, 179, 70, 181, 71, 183, 0, 185, 0, 187, 72, 189, 73, 191, 74, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 0, 205, 75, 207, 0, 209, 76, 211, 0, 213, 0, 215, 77, 217, 78, 219, 79, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 0, 235, 80, 237, 81, 239, 82, 241, 83, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 0, 255, 84, 257, 0, 259, 85, 261, 86, 263, 87, 265, 0, 267, 0, 269, 88, 271, 89, 273, 0, 275, 90, 277, 0, 279, 91, 281, 92, 283, 93, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 0, 303, 94, 305, 95, 307, 96, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 0, 321, 97, 323, 98, 325, 99, 327, 0, 329, 100, 331, 101, 333, 102, 335, 103, 337, 0, 339, 0, 341, 104, 343, 105, 345, 106, 347, 107, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 0, 363, 108, 365, 109, 367, 110, 369, 0, 371, 0, 373, 0, 375, 0, 377, 111, 379, 112, 381, 113, 383, 0, 385, 0, 387, 0, 389, 114, 391, 115, 393, 116, 395, 0, 397, 0, 399, 117, 401, 118, 403, 119, 405, 0, 407, 0, 409, 0, 411, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 
0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1512, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 
1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 1, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 2, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 3, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 229, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 4, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 5, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 6, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 7, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 8, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 9, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 10, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 
0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 11, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 12, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 13, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 14, 411, 1, 0, 0, 0, 15, 413, 1, 0, 0, 0, 17, 423, 1, 0, 0, 0, 19, 430, 1, 0, 0, 0, 21, 439, 1, 0, 0, 0, 23, 446, 1, 0, 0, 0, 25, 456, 1, 0, 0, 0, 27, 463, 1, 0, 0, 0, 29, 470, 1, 0, 0, 0, 31, 477, 1, 0, 0, 0, 33, 485, 1, 0, 0, 0, 35, 497, 1, 0, 0, 0, 37, 506, 1, 0, 0, 0, 39, 512, 1, 0, 0, 0, 41, 519, 1, 0, 0, 0, 43, 526, 1, 0, 0, 0, 45, 534, 1, 0, 0, 0, 47, 542, 1, 0, 0, 0, 49, 557, 1, 0, 0, 0, 51, 567, 1, 0, 0, 0, 53, 579, 1, 0, 0, 0, 55, 585, 1, 0, 0, 0, 57, 602, 1, 0, 0, 0, 59, 618, 1, 0, 0, 0, 61, 624, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 630, 1, 0, 0, 0, 67, 632, 1, 0, 0, 0, 69, 634, 1, 0, 0, 0, 71, 637, 1, 0, 0, 0, 73, 639, 1, 0, 0, 0, 75, 648, 1, 0, 0, 0, 77, 650, 1, 0, 0, 0, 79, 655, 1, 0, 0, 0, 81, 657, 1, 0, 0, 0, 83, 662, 1, 0, 0, 0, 85, 693, 1, 0, 0, 0, 87, 696, 1, 0, 0, 0, 89, 742, 1, 0, 0, 0, 91, 744, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 755, 1, 0, 0, 0, 99, 757, 1, 0, 0, 0, 101, 760, 1, 0, 0, 0, 103, 762, 1, 0, 0, 0, 105, 767, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 781, 1, 0, 0, 0, 113, 784, 1, 0, 0, 0, 115, 787, 1, 0, 0, 0, 117, 792, 1, 0, 0, 0, 119, 797, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 803, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 814, 1, 0, 0, 0, 129, 817, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 825, 1, 0, 0, 0, 135, 827, 1, 0, 0, 0, 137, 832, 1, 0, 0, 0, 139, 835, 1, 0, 0, 0, 141, 838, 1, 0, 0, 0, 143, 841, 1, 0, 0, 0, 145, 843, 1, 0, 0, 0, 147, 846, 1, 0, 0, 0, 149, 848, 1, 0, 
0, 0, 151, 851, 1, 0, 0, 0, 153, 853, 1, 0, 0, 0, 155, 855, 1, 0, 0, 0, 157, 857, 1, 0, 0, 0, 159, 859, 1, 0, 0, 0, 161, 861, 1, 0, 0, 0, 163, 866, 1, 0, 0, 0, 165, 887, 1, 0, 0, 0, 167, 889, 1, 0, 0, 0, 169, 894, 1, 0, 0, 0, 171, 915, 1, 0, 0, 0, 173, 917, 1, 0, 0, 0, 175, 925, 1, 0, 0, 0, 177, 927, 1, 0, 0, 0, 179, 931, 1, 0, 0, 0, 181, 935, 1, 0, 0, 0, 183, 939, 1, 0, 0, 0, 185, 944, 1, 0, 0, 0, 187, 949, 1, 0, 0, 0, 189, 953, 1, 0, 0, 0, 191, 957, 1, 0, 0, 0, 193, 961, 1, 0, 0, 0, 195, 966, 1, 0, 0, 0, 197, 970, 1, 0, 0, 0, 199, 974, 1, 0, 0, 0, 201, 978, 1, 0, 0, 0, 203, 982, 1, 0, 0, 0, 205, 986, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1001, 1, 0, 0, 0, 211, 1005, 1, 0, 0, 0, 213, 1009, 1, 0, 0, 0, 215, 1013, 1, 0, 0, 0, 217, 1017, 1, 0, 0, 0, 219, 1021, 1, 0, 0, 0, 221, 1025, 1, 0, 0, 0, 223, 1030, 1, 0, 0, 0, 225, 1034, 1, 0, 0, 0, 227, 1038, 1, 0, 0, 0, 229, 1043, 1, 0, 0, 0, 231, 1052, 1, 0, 0, 0, 233, 1073, 1, 0, 0, 0, 235, 1077, 1, 0, 0, 0, 237, 1081, 1, 0, 0, 0, 239, 1085, 1, 0, 0, 0, 241, 1089, 1, 0, 0, 0, 243, 1093, 1, 0, 0, 0, 245, 1098, 1, 0, 0, 0, 247, 1102, 1, 0, 0, 0, 249, 1106, 1, 0, 0, 0, 251, 1110, 1, 0, 0, 0, 253, 1115, 1, 0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1123, 1, 0, 0, 0, 259, 1127, 1, 0, 0, 0, 261, 1131, 1, 0, 0, 0, 263, 1135, 1, 0, 0, 0, 265, 1139, 1, 0, 0, 0, 267, 1144, 1, 0, 0, 0, 269, 1149, 1, 0, 0, 0, 271, 1154, 1, 0, 0, 0, 273, 1161, 1, 0, 0, 0, 275, 1170, 1, 0, 0, 0, 277, 1177, 1, 0, 0, 0, 279, 1181, 1, 0, 0, 0, 281, 1185, 1, 0, 0, 0, 283, 1189, 1, 0, 0, 0, 285, 1193, 1, 0, 0, 0, 287, 1199, 1, 0, 0, 0, 289, 1203, 1, 0, 0, 0, 291, 1207, 1, 0, 0, 0, 293, 1211, 1, 0, 0, 0, 295, 1215, 1, 0, 0, 0, 297, 1219, 1, 0, 0, 0, 299, 1223, 1, 0, 0, 0, 301, 1228, 1, 0, 0, 0, 303, 1233, 1, 0, 0, 0, 305, 1237, 1, 0, 0, 0, 307, 1241, 1, 0, 0, 0, 309, 1245, 1, 0, 0, 0, 311, 1250, 1, 0, 0, 0, 313, 1254, 1, 0, 0, 0, 315, 1259, 1, 0, 0, 0, 317, 1264, 1, 0, 0, 0, 319, 1268, 1, 0, 0, 0, 321, 1272, 1, 0, 0, 0, 323, 1276, 1, 0, 0, 0, 325, 1280, 1, 0, 0, 
0, 327, 1284, 1, 0, 0, 0, 329, 1289, 1, 0, 0, 0, 331, 1294, 1, 0, 0, 0, 333, 1298, 1, 0, 0, 0, 335, 1302, 1, 0, 0, 0, 337, 1306, 1, 0, 0, 0, 339, 1311, 1, 0, 0, 0, 341, 1320, 1, 0, 0, 0, 343, 1324, 1, 0, 0, 0, 345, 1328, 1, 0, 0, 0, 347, 1332, 1, 0, 0, 0, 349, 1336, 1, 0, 0, 0, 351, 1341, 1, 0, 0, 0, 353, 1345, 1, 0, 0, 0, 355, 1349, 1, 0, 0, 0, 357, 1353, 1, 0, 0, 0, 359, 1358, 1, 0, 0, 0, 361, 1362, 1, 0, 0, 0, 363, 1366, 1, 0, 0, 0, 365, 1370, 1, 0, 0, 0, 367, 1374, 1, 0, 0, 0, 369, 1378, 1, 0, 0, 0, 371, 1384, 1, 0, 0, 0, 373, 1388, 1, 0, 0, 0, 375, 1392, 1, 0, 0, 0, 377, 1396, 1, 0, 0, 0, 379, 1400, 1, 0, 0, 0, 381, 1404, 1, 0, 0, 0, 383, 1408, 1, 0, 0, 0, 385, 1413, 1, 0, 0, 0, 387, 1419, 1, 0, 0, 0, 389, 1425, 1, 0, 0, 0, 391, 1429, 1, 0, 0, 0, 393, 1433, 1, 0, 0, 0, 395, 1437, 1, 0, 0, 0, 397, 1443, 1, 0, 0, 0, 399, 1449, 1, 0, 0, 0, 401, 1453, 1, 0, 0, 0, 403, 1457, 1, 0, 0, 0, 405, 1461, 1, 0, 0, 0, 407, 1467, 1, 0, 0, 0, 409, 1473, 1, 0, 0, 0, 411, 1479, 1, 0, 0, 0, 413, 414, 7, 0, 0, 0, 414, 415, 7, 1, 0, 0, 415, 416, 7, 2, 0, 0, 416, 417, 7, 2, 0, 0, 417, 418, 7, 3, 0, 0, 418, 419, 7, 4, 0, 0, 419, 420, 7, 5, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 0, 0, 0, 422, 16, 1, 0, 0, 0, 423, 424, 7, 0, 0, 0, 424, 425, 7, 6, 0, 0, 425, 426, 7, 7, 0, 0, 426, 427, 7, 8, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 1, 1, 0, 429, 18, 1, 0, 0, 0, 430, 431, 7, 3, 0, 0, 431, 432, 7, 9, 0, 0, 432, 433, 7, 6, 0, 0, 433, 434, 7, 1, 0, 0, 434, 435, 7, 4, 0, 0, 435, 436, 7, 10, 0, 0, 436, 437, 1, 0, 0, 0, 437, 438, 6, 2, 2, 0, 438, 20, 1, 0, 0, 0, 439, 440, 7, 3, 0, 0, 440, 441, 7, 11, 0, 0, 441, 442, 7, 12, 0, 0, 442, 443, 7, 13, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 3, 0, 0, 445, 22, 1, 0, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 14, 0, 0, 448, 449, 7, 8, 0, 0, 449, 450, 7, 13, 0, 0, 450, 451, 7, 12, 0, 0, 451, 452, 7, 1, 0, 0, 452, 453, 7, 9, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 4, 3, 0, 455, 24, 1, 0, 0, 0, 456, 457, 7, 15, 0, 0, 457, 458, 7, 6, 0, 0, 458, 459, 7, 
7, 0, 0, 459, 460, 7, 16, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 5, 4, 0, 462, 26, 1, 0, 0, 0, 463, 464, 7, 17, 0, 0, 464, 465, 7, 6, 0, 0, 465, 466, 7, 7, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 6, 0, 0, 469, 28, 1, 0, 0, 0, 470, 471, 7, 18, 0, 0, 471, 472, 7, 3, 0, 0, 472, 473, 7, 3, 0, 0, 473, 474, 7, 8, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 7, 1, 0, 476, 30, 1, 0, 0, 0, 477, 478, 7, 13, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 16, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 5, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 8, 0, 0, 484, 32, 1, 0, 0, 0, 485, 486, 7, 16, 0, 0, 486, 487, 7, 11, 0, 0, 487, 488, 5, 95, 0, 0, 488, 489, 7, 3, 0, 0, 489, 490, 7, 14, 0, 0, 490, 491, 7, 8, 0, 0, 491, 492, 7, 12, 0, 0, 492, 493, 7, 9, 0, 0, 493, 494, 7, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 496, 6, 9, 5, 0, 496, 34, 1, 0, 0, 0, 497, 498, 7, 6, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 7, 9, 0, 0, 500, 501, 7, 12, 0, 0, 501, 502, 7, 16, 0, 0, 502, 503, 7, 3, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 10, 6, 0, 505, 36, 1, 0, 0, 0, 506, 507, 7, 6, 0, 0, 507, 508, 7, 7, 0, 0, 508, 509, 7, 19, 0, 0, 509, 510, 1, 0, 0, 0, 510, 511, 6, 11, 0, 0, 511, 38, 1, 0, 0, 0, 512, 513, 7, 2, 0, 0, 513, 514, 7, 10, 0, 0, 514, 515, 7, 7, 0, 0, 515, 516, 7, 19, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 12, 7, 0, 518, 40, 1, 0, 0, 0, 519, 520, 7, 2, 0, 0, 520, 521, 7, 7, 0, 0, 521, 522, 7, 6, 0, 0, 522, 523, 7, 5, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 13, 0, 0, 525, 42, 1, 0, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 12, 0, 0, 529, 530, 7, 5, 0, 0, 530, 531, 7, 2, 0, 0, 531, 532, 1, 0, 0, 0, 532, 533, 6, 14, 0, 0, 533, 44, 1, 0, 0, 0, 534, 535, 7, 19, 0, 0, 535, 536, 7, 10, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 7, 6, 0, 0, 538, 539, 7, 3, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 15, 0, 0, 541, 46, 1, 0, 0, 0, 542, 543, 4, 16, 0, 0, 543, 544, 7, 1, 0, 0, 544, 545, 7, 9, 0, 0, 545, 546, 7, 13, 0, 0, 546, 547, 7, 1, 0, 0, 547, 548, 7, 9, 0, 0, 548, 549, 7, 3, 
0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 12, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 2, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 16, 0, 0, 556, 48, 1, 0, 0, 0, 557, 558, 4, 17, 1, 0, 558, 559, 7, 13, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 7, 0, 0, 561, 562, 7, 18, 0, 0, 562, 563, 7, 20, 0, 0, 563, 564, 7, 8, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 17, 8, 0, 566, 50, 1, 0, 0, 0, 567, 568, 4, 18, 2, 0, 568, 569, 7, 16, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 7, 5, 0, 0, 571, 572, 7, 6, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 4, 0, 0, 574, 575, 7, 2, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 6, 18, 9, 0, 577, 52, 1, 0, 0, 0, 578, 580, 8, 21, 0, 0, 579, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 584, 6, 19, 0, 0, 584, 54, 1, 0, 0, 0, 585, 586, 5, 47, 0, 0, 586, 587, 5, 47, 0, 0, 587, 591, 1, 0, 0, 0, 588, 590, 8, 22, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 596, 5, 13, 0, 0, 595, 594, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 598, 1, 0, 0, 0, 597, 599, 5, 10, 0, 0, 598, 597, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 6, 20, 10, 0, 601, 56, 1, 0, 0, 0, 602, 603, 5, 47, 0, 0, 603, 604, 5, 42, 0, 0, 604, 609, 1, 0, 0, 0, 605, 608, 3, 57, 21, 0, 606, 608, 9, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 606, 1, 0, 0, 0, 608, 611, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 612, 613, 5, 42, 0, 0, 613, 614, 5, 47, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 21, 10, 0, 616, 58, 1, 0, 0, 0, 617, 619, 7, 23, 0, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 22, 10, 0, 623, 60, 1, 0, 0, 0, 624, 625, 5, 58, 0, 0, 625, 62, 1, 0, 0, 0, 626, 627, 5, 124, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 6, 24, 11, 0, 629, 64, 1, 0, 0, 0, 630, 631, 7, 24, 0, 0, 631, 66, 
1, 0, 0, 0, 632, 633, 7, 25, 0, 0, 633, 68, 1, 0, 0, 0, 634, 635, 5, 92, 0, 0, 635, 636, 7, 26, 0, 0, 636, 70, 1, 0, 0, 0, 637, 638, 8, 27, 0, 0, 638, 72, 1, 0, 0, 0, 639, 641, 7, 3, 0, 0, 640, 642, 7, 28, 0, 0, 641, 640, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 644, 1, 0, 0, 0, 643, 645, 3, 65, 25, 0, 644, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 74, 1, 0, 0, 0, 648, 649, 5, 64, 0, 0, 649, 76, 1, 0, 0, 0, 650, 651, 5, 96, 0, 0, 651, 78, 1, 0, 0, 0, 652, 656, 8, 29, 0, 0, 653, 654, 5, 96, 0, 0, 654, 656, 5, 96, 0, 0, 655, 652, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 656, 80, 1, 0, 0, 0, 657, 658, 5, 95, 0, 0, 658, 82, 1, 0, 0, 0, 659, 663, 3, 67, 26, 0, 660, 663, 3, 65, 25, 0, 661, 663, 3, 81, 33, 0, 662, 659, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 84, 1, 0, 0, 0, 664, 669, 5, 34, 0, 0, 665, 668, 3, 69, 27, 0, 666, 668, 3, 71, 28, 0, 667, 665, 1, 0, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 694, 5, 34, 0, 0, 673, 674, 5, 34, 0, 0, 674, 675, 5, 34, 0, 0, 675, 676, 5, 34, 0, 0, 676, 680, 1, 0, 0, 0, 677, 679, 8, 22, 0, 0, 678, 677, 1, 0, 0, 0, 679, 682, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 683, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 683, 684, 5, 34, 0, 0, 684, 685, 5, 34, 0, 0, 685, 686, 5, 34, 0, 0, 686, 688, 1, 0, 0, 0, 687, 689, 5, 34, 0, 0, 688, 687, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 691, 1, 0, 0, 0, 690, 692, 5, 34, 0, 0, 691, 690, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 694, 1, 0, 0, 0, 693, 664, 1, 0, 0, 0, 693, 673, 1, 0, 0, 0, 694, 86, 1, 0, 0, 0, 695, 697, 3, 65, 25, 0, 696, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 88, 1, 0, 0, 0, 700, 702, 3, 65, 25, 0, 701, 700, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 709, 3, 105, 45, 0, 706, 708, 3, 65, 25, 0, 707, 706, 1, 0, 0, 
0, 708, 711, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 743, 1, 0, 0, 0, 711, 709, 1, 0, 0, 0, 712, 714, 3, 105, 45, 0, 713, 715, 3, 65, 25, 0, 714, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 743, 1, 0, 0, 0, 718, 720, 3, 65, 25, 0, 719, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 730, 1, 0, 0, 0, 723, 727, 3, 105, 45, 0, 724, 726, 3, 65, 25, 0, 725, 724, 1, 0, 0, 0, 726, 729, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 731, 1, 0, 0, 0, 729, 727, 1, 0, 0, 0, 730, 723, 1, 0, 0, 0, 730, 731, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 733, 3, 73, 29, 0, 733, 743, 1, 0, 0, 0, 734, 736, 3, 105, 45, 0, 735, 737, 3, 65, 25, 0, 736, 735, 1, 0, 0, 0, 737, 738, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 739, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 741, 3, 73, 29, 0, 741, 743, 1, 0, 0, 0, 742, 701, 1, 0, 0, 0, 742, 712, 1, 0, 0, 0, 742, 719, 1, 0, 0, 0, 742, 734, 1, 0, 0, 0, 743, 90, 1, 0, 0, 0, 744, 745, 7, 30, 0, 0, 745, 746, 7, 31, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 9, 0, 0, 749, 750, 7, 0, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 7, 12, 0, 0, 752, 753, 7, 2, 0, 0, 753, 754, 7, 4, 0, 0, 754, 96, 1, 0, 0, 0, 755, 756, 5, 61, 0, 0, 756, 98, 1, 0, 0, 0, 757, 758, 5, 58, 0, 0, 758, 759, 5, 58, 0, 0, 759, 100, 1, 0, 0, 0, 760, 761, 5, 44, 0, 0, 761, 102, 1, 0, 0, 0, 762, 763, 7, 0, 0, 0, 763, 764, 7, 3, 0, 0, 764, 765, 7, 2, 0, 0, 765, 766, 7, 4, 0, 0, 766, 104, 1, 0, 0, 0, 767, 768, 5, 46, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 12, 0, 0, 771, 772, 7, 13, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 3, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 15, 0, 0, 776, 777, 7, 1, 0, 0, 777, 778, 7, 6, 0, 0, 778, 779, 7, 2, 0, 0, 779, 780, 7, 5, 0, 0, 780, 110, 1, 0, 0, 0, 781, 782, 7, 1, 0, 0, 782, 783, 7, 9, 0, 0, 783, 112, 1, 0, 0, 0, 784, 785, 7, 1, 0, 0, 785, 786, 7, 2, 0, 0, 786, 114, 1, 0, 0, 0, 787, 788, 7, 13, 0, 0, 788, 
789, 7, 12, 0, 0, 789, 790, 7, 2, 0, 0, 790, 791, 7, 5, 0, 0, 791, 116, 1, 0, 0, 0, 792, 793, 7, 13, 0, 0, 793, 794, 7, 1, 0, 0, 794, 795, 7, 18, 0, 0, 795, 796, 7, 3, 0, 0, 796, 118, 1, 0, 0, 0, 797, 798, 5, 40, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 7, 0, 0, 801, 802, 7, 5, 0, 0, 802, 122, 1, 0, 0, 0, 803, 804, 7, 9, 0, 0, 804, 805, 7, 20, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 13, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 20, 0, 0, 810, 811, 7, 13, 0, 0, 811, 812, 7, 13, 0, 0, 812, 813, 7, 2, 0, 0, 813, 126, 1, 0, 0, 0, 814, 815, 7, 7, 0, 0, 815, 816, 7, 6, 0, 0, 816, 128, 1, 0, 0, 0, 817, 818, 5, 63, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 7, 6, 0, 0, 820, 821, 7, 13, 0, 0, 821, 822, 7, 1, 0, 0, 822, 823, 7, 18, 0, 0, 823, 824, 7, 3, 0, 0, 824, 132, 1, 0, 0, 0, 825, 826, 5, 41, 0, 0, 826, 134, 1, 0, 0, 0, 827, 828, 7, 5, 0, 0, 828, 829, 7, 6, 0, 0, 829, 830, 7, 20, 0, 0, 830, 831, 7, 3, 0, 0, 831, 136, 1, 0, 0, 0, 832, 833, 5, 61, 0, 0, 833, 834, 5, 61, 0, 0, 834, 138, 1, 0, 0, 0, 835, 836, 5, 61, 0, 0, 836, 837, 5, 126, 0, 0, 837, 140, 1, 0, 0, 0, 838, 839, 5, 33, 0, 0, 839, 840, 5, 61, 0, 0, 840, 142, 1, 0, 0, 0, 841, 842, 5, 60, 0, 0, 842, 144, 1, 0, 0, 0, 843, 844, 5, 60, 0, 0, 844, 845, 5, 61, 0, 0, 845, 146, 1, 0, 0, 0, 846, 847, 5, 62, 0, 0, 847, 148, 1, 0, 0, 0, 848, 849, 5, 62, 0, 0, 849, 850, 5, 61, 0, 0, 850, 150, 1, 0, 0, 0, 851, 852, 5, 43, 0, 0, 852, 152, 1, 0, 0, 0, 853, 854, 5, 45, 0, 0, 854, 154, 1, 0, 0, 0, 855, 856, 5, 42, 0, 0, 856, 156, 1, 0, 0, 0, 857, 858, 5, 47, 0, 0, 858, 158, 1, 0, 0, 0, 859, 860, 5, 37, 0, 0, 860, 160, 1, 0, 0, 0, 861, 862, 4, 73, 3, 0, 862, 863, 3, 61, 23, 0, 863, 864, 1, 0, 0, 0, 864, 865, 6, 73, 12, 0, 865, 162, 1, 0, 0, 0, 866, 867, 3, 45, 15, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 74, 13, 0, 869, 164, 1, 0, 0, 0, 870, 873, 3, 129, 57, 0, 871, 874, 3, 67, 26, 0, 872, 874, 3, 81, 33, 0, 873, 871, 1, 0, 0, 0, 873, 872, 1, 0, 0, 0, 874, 878, 1, 0, 0, 0, 875, 877, 3, 83, 
34, 0, 876, 875, 1, 0, 0, 0, 877, 880, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 888, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 883, 3, 129, 57, 0, 882, 884, 3, 65, 25, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 870, 1, 0, 0, 0, 887, 881, 1, 0, 0, 0, 888, 166, 1, 0, 0, 0, 889, 890, 5, 91, 0, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 76, 0, 0, 892, 893, 6, 76, 0, 0, 893, 168, 1, 0, 0, 0, 894, 895, 5, 93, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 77, 11, 0, 897, 898, 6, 77, 11, 0, 898, 170, 1, 0, 0, 0, 899, 903, 3, 67, 26, 0, 900, 902, 3, 83, 34, 0, 901, 900, 1, 0, 0, 0, 902, 905, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 916, 1, 0, 0, 0, 905, 903, 1, 0, 0, 0, 906, 909, 3, 81, 33, 0, 907, 909, 3, 75, 30, 0, 908, 906, 1, 0, 0, 0, 908, 907, 1, 0, 0, 0, 909, 911, 1, 0, 0, 0, 910, 912, 3, 83, 34, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 899, 1, 0, 0, 0, 915, 908, 1, 0, 0, 0, 916, 172, 1, 0, 0, 0, 917, 919, 3, 77, 31, 0, 918, 920, 3, 79, 32, 0, 919, 918, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 919, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 3, 77, 31, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 173, 79, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 55, 20, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 57, 21, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 10, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 59, 22, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 10, 0, 938, 182, 1, 0, 0, 0, 939, 940, 3, 167, 76, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 84, 14, 0, 942, 943, 6, 84, 15, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 63, 24, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 16, 0, 947, 948, 6, 85, 11, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 59, 22, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 55, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 
87, 10, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 57, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 10, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 63, 24, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 16, 0, 964, 965, 6, 89, 11, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 167, 76, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 14, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 169, 77, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 61, 23, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 12, 0, 977, 200, 1, 0, 0, 0, 978, 979, 3, 101, 43, 0, 979, 980, 1, 0, 0, 0, 980, 981, 6, 93, 18, 0, 981, 202, 1, 0, 0, 0, 982, 983, 3, 97, 41, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 94, 19, 0, 985, 204, 1, 0, 0, 0, 986, 987, 7, 16, 0, 0, 987, 988, 7, 3, 0, 0, 988, 989, 7, 5, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 0, 0, 0, 991, 992, 7, 12, 0, 0, 992, 993, 7, 5, 0, 0, 993, 994, 7, 12, 0, 0, 994, 206, 1, 0, 0, 0, 995, 999, 8, 32, 0, 0, 996, 997, 5, 47, 0, 0, 997, 999, 8, 33, 0, 0, 998, 995, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 999, 208, 1, 0, 0, 0, 1000, 1002, 3, 207, 96, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 209, 97, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 20, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 85, 35, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 21, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 55, 20, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 57, 21, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 10, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 59, 22, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 10, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 63, 24, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 16, 0, 1028, 1029, 6, 103, 11, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 105, 45, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 101, 43, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 18, 0, 1037, 
226, 1, 0, 0, 0, 1038, 1039, 4, 106, 4, 0, 1039, 1040, 3, 129, 57, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 23, 0, 1042, 228, 1, 0, 0, 0, 1043, 1044, 4, 107, 5, 0, 1044, 1045, 3, 165, 75, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 107, 24, 0, 1047, 230, 1, 0, 0, 0, 1048, 1053, 3, 67, 26, 0, 1049, 1053, 3, 65, 25, 0, 1050, 1053, 3, 81, 33, 0, 1051, 1053, 3, 155, 70, 0, 1052, 1048, 1, 0, 0, 0, 1052, 1049, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 232, 1, 0, 0, 0, 1054, 1057, 3, 67, 26, 0, 1055, 1057, 3, 155, 70, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1061, 1, 0, 0, 0, 1058, 1060, 3, 231, 108, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1063, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1074, 1, 0, 0, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 3, 81, 33, 0, 1065, 1067, 3, 75, 30, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1070, 3, 231, 108, 0, 1069, 1068, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1074, 1, 0, 0, 0, 1073, 1056, 1, 0, 0, 0, 1073, 1066, 1, 0, 0, 0, 1074, 234, 1, 0, 0, 0, 1075, 1078, 3, 233, 109, 0, 1076, 1078, 3, 173, 79, 0, 1077, 1075, 1, 0, 0, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 55, 20, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 1, 0, 0, 0, 1085, 1086, 3, 57, 21, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 59, 22, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 10, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 63, 24, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 16, 0, 1096, 1097, 6, 114, 11, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 97, 41, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 19, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 101, 43, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 18, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 105, 45, 0, 1107, 1108, 
1, 0, 0, 0, 1108, 1109, 6, 117, 22, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 4, 118, 6, 0, 1111, 1112, 3, 129, 57, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 118, 23, 0, 1114, 252, 1, 0, 0, 0, 1115, 1116, 4, 119, 7, 0, 1116, 1117, 3, 165, 75, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 119, 24, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 7, 12, 0, 0, 1121, 1122, 7, 2, 0, 0, 1122, 256, 1, 0, 0, 0, 1123, 1124, 3, 235, 110, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 121, 25, 0, 1126, 258, 1, 0, 0, 0, 1127, 1128, 3, 55, 20, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 122, 10, 0, 1130, 260, 1, 0, 0, 0, 1131, 1132, 3, 57, 21, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 123, 10, 0, 1134, 262, 1, 0, 0, 0, 1135, 1136, 3, 59, 22, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 124, 10, 0, 1138, 264, 1, 0, 0, 0, 1139, 1140, 3, 63, 24, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 125, 16, 0, 1142, 1143, 6, 125, 11, 0, 1143, 266, 1, 0, 0, 0, 1144, 1145, 3, 167, 76, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 14, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 7, 7, 0, 0, 1150, 1151, 7, 9, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 7, 19, 0, 0, 1155, 1156, 7, 1, 0, 0, 1156, 1157, 7, 5, 0, 0, 1157, 1158, 7, 10, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 128, 27, 0, 1160, 272, 1, 0, 0, 0, 1161, 1162, 8, 34, 0, 0, 1162, 274, 1, 0, 0, 0, 1163, 1165, 3, 273, 129, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 3, 61, 23, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1164, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1173, 1, 0, 0, 0, 1172, 1174, 3, 273, 129, 0, 1173, 1172, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1175, 1176, 1, 0, 0, 0, 1176, 276, 1, 0, 0, 0, 1177, 1178, 3, 275, 130, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 131, 28, 0, 1180, 278, 1, 0, 0, 0, 1181, 1182, 3, 55, 20, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 132, 10, 0, 1184, 280, 1, 0, 
0, 0, 1185, 1186, 3, 57, 21, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 133, 10, 0, 1188, 282, 1, 0, 0, 0, 1189, 1190, 3, 59, 22, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 134, 10, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 63, 24, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 16, 0, 1196, 1197, 6, 135, 11, 0, 1197, 1198, 6, 135, 11, 0, 1198, 286, 1, 0, 0, 0, 1199, 1200, 3, 97, 41, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 136, 19, 0, 1202, 288, 1, 0, 0, 0, 1203, 1204, 3, 101, 43, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 137, 18, 0, 1206, 290, 1, 0, 0, 0, 1207, 1208, 3, 105, 45, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 138, 22, 0, 1210, 292, 1, 0, 0, 0, 1211, 1212, 3, 271, 128, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 139, 29, 0, 1214, 294, 1, 0, 0, 0, 1215, 1216, 3, 235, 110, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 140, 25, 0, 1218, 296, 1, 0, 0, 0, 1219, 1220, 3, 175, 80, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 141, 30, 0, 1222, 298, 1, 0, 0, 0, 1223, 1224, 4, 142, 8, 0, 1224, 1225, 3, 129, 57, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 142, 23, 0, 1227, 300, 1, 0, 0, 0, 1228, 1229, 4, 143, 9, 0, 1229, 1230, 3, 165, 75, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 143, 24, 0, 1232, 302, 1, 0, 0, 0, 1233, 1234, 3, 55, 20, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 144, 10, 0, 1236, 304, 1, 0, 0, 0, 1237, 1238, 3, 57, 21, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 145, 10, 0, 1240, 306, 1, 0, 0, 0, 1241, 1242, 3, 59, 22, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 146, 10, 0, 1244, 308, 1, 0, 0, 0, 1245, 1246, 3, 63, 24, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 16, 0, 1248, 1249, 6, 147, 11, 0, 1249, 310, 1, 0, 0, 0, 1250, 1251, 3, 105, 45, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 148, 22, 0, 1253, 312, 1, 0, 0, 0, 1254, 1255, 4, 149, 10, 0, 1255, 1256, 3, 129, 57, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 149, 23, 0, 1258, 314, 1, 0, 0, 0, 1259, 1260, 4, 150, 11, 0, 1260, 1261, 3, 165, 75, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 150, 24, 0, 1263, 316, 1, 0, 
0, 0, 1264, 1265, 3, 175, 80, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 151, 30, 0, 1267, 318, 1, 0, 0, 0, 1268, 1269, 3, 171, 78, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 152, 31, 0, 1271, 320, 1, 0, 0, 0, 1272, 1273, 3, 55, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 153, 10, 0, 1275, 322, 1, 0, 0, 0, 1276, 1277, 3, 57, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 154, 10, 0, 1279, 324, 1, 0, 0, 0, 1280, 1281, 3, 59, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 155, 10, 0, 1283, 326, 1, 0, 0, 0, 1284, 1285, 3, 63, 24, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 156, 16, 0, 1287, 1288, 6, 156, 11, 0, 1288, 328, 1, 0, 0, 0, 1289, 1290, 7, 1, 0, 0, 1290, 1291, 7, 9, 0, 0, 1291, 1292, 7, 15, 0, 0, 1292, 1293, 7, 7, 0, 0, 1293, 330, 1, 0, 0, 0, 1294, 1295, 3, 55, 20, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 158, 10, 0, 1297, 332, 1, 0, 0, 0, 1298, 1299, 3, 57, 21, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 159, 10, 0, 1301, 334, 1, 0, 0, 0, 1302, 1303, 3, 59, 22, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 160, 10, 0, 1305, 336, 1, 0, 0, 0, 1306, 1307, 3, 169, 77, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 161, 17, 0, 1309, 1310, 6, 161, 11, 0, 1310, 338, 1, 0, 0, 0, 1311, 1312, 3, 61, 23, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 162, 12, 0, 1314, 340, 1, 0, 0, 0, 1315, 1321, 3, 75, 30, 0, 1316, 1321, 3, 65, 25, 0, 1317, 1321, 3, 105, 45, 0, 1318, 1321, 3, 67, 26, 0, 1319, 1321, 3, 81, 33, 0, 1320, 1315, 1, 0, 0, 0, 1320, 1316, 1, 0, 0, 0, 1320, 1317, 1, 0, 0, 0, 1320, 1318, 1, 0, 0, 0, 1320, 1319, 1, 0, 0, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1320, 1, 0, 0, 0, 1322, 1323, 1, 0, 0, 0, 1323, 342, 1, 0, 0, 0, 1324, 1325, 3, 55, 20, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 164, 10, 0, 1327, 344, 1, 0, 0, 0, 1328, 1329, 3, 57, 21, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 165, 10, 0, 1331, 346, 1, 0, 0, 0, 1332, 1333, 3, 59, 22, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 166, 10, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 63, 24, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 
16, 0, 1339, 1340, 6, 167, 11, 0, 1340, 350, 1, 0, 0, 0, 1341, 1342, 3, 61, 23, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 168, 12, 0, 1344, 352, 1, 0, 0, 0, 1345, 1346, 3, 101, 43, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 169, 18, 0, 1348, 354, 1, 0, 0, 0, 1349, 1350, 3, 105, 45, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 170, 22, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 269, 127, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 32, 0, 1356, 1357, 6, 171, 33, 0, 1357, 358, 1, 0, 0, 0, 1358, 1359, 3, 209, 97, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 172, 20, 0, 1361, 360, 1, 0, 0, 0, 1362, 1363, 3, 85, 35, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 173, 21, 0, 1365, 362, 1, 0, 0, 0, 1366, 1367, 3, 55, 20, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 174, 10, 0, 1369, 364, 1, 0, 0, 0, 1370, 1371, 3, 57, 21, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 175, 10, 0, 1373, 366, 1, 0, 0, 0, 1374, 1375, 3, 59, 22, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 176, 10, 0, 1377, 368, 1, 0, 0, 0, 1378, 1379, 3, 63, 24, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 177, 16, 0, 1381, 1382, 6, 177, 11, 0, 1382, 1383, 6, 177, 11, 0, 1383, 370, 1, 0, 0, 0, 1384, 1385, 3, 101, 43, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 178, 18, 0, 1387, 372, 1, 0, 0, 0, 1388, 1389, 3, 105, 45, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 179, 22, 0, 1391, 374, 1, 0, 0, 0, 1392, 1393, 3, 235, 110, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 180, 25, 0, 1395, 376, 1, 0, 0, 0, 1396, 1397, 3, 55, 20, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 181, 10, 0, 1399, 378, 1, 0, 0, 0, 1400, 1401, 3, 57, 21, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 182, 10, 0, 1403, 380, 1, 0, 0, 0, 1404, 1405, 3, 59, 22, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 183, 10, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 63, 24, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 16, 0, 1411, 1412, 6, 184, 11, 0, 1412, 384, 1, 0, 0, 0, 1413, 1414, 3, 209, 97, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 185, 20, 0, 1416, 1417, 6, 185, 11, 0, 1417, 1418, 6, 185, 
34, 0, 1418, 386, 1, 0, 0, 0, 1419, 1420, 3, 85, 35, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 186, 21, 0, 1422, 1423, 6, 186, 11, 0, 1423, 1424, 6, 186, 34, 0, 1424, 388, 1, 0, 0, 0, 1425, 1426, 3, 55, 20, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 187, 10, 0, 1428, 390, 1, 0, 0, 0, 1429, 1430, 3, 57, 21, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 188, 10, 0, 1432, 392, 1, 0, 0, 0, 1433, 1434, 3, 59, 22, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 189, 10, 0, 1436, 394, 1, 0, 0, 0, 1437, 1438, 3, 61, 23, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 190, 12, 0, 1440, 1441, 6, 190, 11, 0, 1441, 1442, 6, 190, 9, 0, 1442, 396, 1, 0, 0, 0, 1443, 1444, 3, 101, 43, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 191, 18, 0, 1446, 1447, 6, 191, 11, 0, 1447, 1448, 6, 191, 9, 0, 1448, 398, 1, 0, 0, 0, 1449, 1450, 3, 55, 20, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 192, 10, 0, 1452, 400, 1, 0, 0, 0, 1453, 1454, 3, 57, 21, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 193, 10, 0, 1456, 402, 1, 0, 0, 0, 1457, 1458, 3, 59, 22, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 194, 10, 0, 1460, 404, 1, 0, 0, 0, 1461, 1462, 3, 175, 80, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 195, 11, 0, 1464, 1465, 6, 195, 0, 0, 1465, 1466, 6, 195, 30, 0, 1466, 406, 1, 0, 0, 0, 1467, 1468, 3, 171, 78, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 196, 11, 0, 1470, 1471, 6, 196, 0, 0, 1471, 1472, 6, 196, 31, 0, 1472, 408, 1, 0, 0, 0, 1473, 1474, 3, 91, 38, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 197, 11, 0, 1476, 1477, 6, 197, 0, 0, 1477, 1478, 6, 197, 35, 0, 1478, 410, 1, 0, 0, 0, 1479, 1480, 3, 63, 24, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 198, 16, 0, 1482, 1483, 6, 198, 11, 0, 1483, 412, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 581, 591, 595, 598, 607, 609, 620, 641, 646, 655, 662, 667, 669, 680, 688, 691, 693, 698, 703, 709, 716, 721, 727, 730, 738, 742, 873, 878, 885, 887, 903, 908, 913, 915, 921, 998, 1003, 1052, 1056, 1061, 1066, 1071, 1073, 1077, 1079, 1166, 1170, 1175, 1320, 1322, 
36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 24, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 48, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index cef4bc5378aaa..f10881fcf0692 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -8,14 +8,16 @@ * 2.0. */ -import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.LexerATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) public class EsqlBaseLexer extends LexerConfig { @@ -25,90 +27,90 @@ public class EsqlBaseLexer extends LexerConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, 
DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, - INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, - CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, - LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, - RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, - CLOSING_METRICS_WS=120; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, 
DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, + INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, + CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, + LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, + RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, + PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, + SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, + LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, + LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, + METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, + CLOSING_METRICS_WS=119; public static final int - EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + EXPRESSION_MODE=1, 
EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", - "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { - "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", - "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", - 
"FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", - "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", - "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", - "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", - "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", - "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", - "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", - "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", - "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", - "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", - "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", - "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", - 
"METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", + "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", + "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", + "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", 
"RENAME_COMMA", "RENAME_DOT", + "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", + "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", + "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", + "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", + "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", + "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", + "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", + "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", + "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } @@ -116,46 +118,45 @@ private static String[] makeRuleNames() { private static 
String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", - "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", - "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, - null, null, null, null, null, null, "'metadata'", null, null, null, null, - null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, - null, null, null, null, null, null, null, null, null, "'info'", null, - null, null, "':'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", + "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", + "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - 
"WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", 
"DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -227,21 +228,23 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 105: - return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); + case 73: + return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex); case 106: + return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); + case 107: 
return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 117: - return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); case 118: + return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); + case 119: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 141: - return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); case 142: + return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); + case 143: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 148: - return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); case 149: + return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); + case 150: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -267,65 +270,72 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 3: return this.isDevVersion(); } return true; } - private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 4: return this.isDevVersion(); } return true; } - private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 5: return this.isDevVersion(); } return true; } - private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 6: return this.isDevVersion(); } return true; } - private 
boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 7: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 8: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 9: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 10: return this.isDevVersion(); } return true; } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 11: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000x\u05c7\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000w\u05cc\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ @@ -381,177 +391,178 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ "\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - 
"\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0001"+ + "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0002"+ + "\u00c6\u0007\u00c6\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ + 
"\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0013\u0004\u0013\u0242\b\u0013\u000b\u0013\f\u0013\u0243\u0001"+ - "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005"+ - "\u0014\u024c\b\u0014\n\u0014\f\u0014\u024f\t\u0014\u0001\u0014\u0003\u0014"+ - "\u0252\b\u0014\u0001\u0014\u0003\u0014\u0255\b\u0014\u0001\u0014\u0001"+ - 
"\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005"+ - "\u0015\u025e\b\u0015\n\u0015\f\u0015\u0261\t\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0269\b\u0016"+ - "\u000b\u0016\f\u0016\u026a\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0003\u001c\u027e\b\u001c\u0001\u001c\u0004\u001c\u0281\b"+ - "\u001c\u000b\u001c\f\u001c\u0282\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u028c\b\u001f\u0001"+ - " \u0001 \u0001!\u0001!\u0001!\u0003!\u0293\b!\u0001\"\u0001\"\u0001\""+ - "\u0005\"\u0298\b\"\n\"\f\"\u029b\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0005\"\u02a3\b\"\n\"\f\"\u02a6\t\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0001\"\u0003\"\u02ad\b\"\u0001\"\u0003\"\u02b0\b\"\u0003\"\u02b2"+ - "\b\"\u0001#\u0004#\u02b5\b#\u000b#\f#\u02b6\u0001$\u0004$\u02ba\b$\u000b"+ - "$\f$\u02bb\u0001$\u0001$\u0005$\u02c0\b$\n$\f$\u02c3\t$\u0001$\u0001$"+ - "\u0004$\u02c7\b$\u000b$\f$\u02c8\u0001$\u0004$\u02cc\b$\u000b$\f$\u02cd"+ - "\u0001$\u0001$\u0005$\u02d2\b$\n$\f$\u02d5\t$\u0003$\u02d7\b$\u0001$\u0001"+ - "$\u0001$\u0001$\u0004$\u02dd\b$\u000b$\f$\u02de\u0001$\u0001$\u0003$\u02e3"+ - "\b$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001"+ - "\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001+\u0001"+ - "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ - "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ - "/\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u0001"+ - "2\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ - "5\u00015\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u0001"+ - 
"6\u00017\u00017\u00017\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ - "@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001"+ - "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ - "H\u0001H\u0001H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001"+ - "J\u0003J\u0367\bJ\u0001J\u0005J\u036a\bJ\nJ\fJ\u036d\tJ\u0001J\u0001J"+ - "\u0004J\u0371\bJ\u000bJ\fJ\u0372\u0003J\u0375\bJ\u0001K\u0001K\u0001K"+ - "\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0005"+ - "M\u0383\bM\nM\fM\u0386\tM\u0001M\u0001M\u0003M\u038a\bM\u0001M\u0004M"+ - "\u038d\bM\u000bM\fM\u038e\u0003M\u0391\bM\u0001N\u0001N\u0004N\u0395\b"+ - "N\u000bN\fN\u0396\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ - "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ - "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ - "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001"+ - "\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001"+ - "^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0003_\u03e4\b_\u0001"+ - "`\u0004`\u03e7\b`\u000b`\f`\u03e8\u0001a\u0001a\u0001a\u0001a\u0001b\u0001"+ - "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ - "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ - "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ - "i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001k\u0003k\u041a\bk\u0001l\u0001l\u0003l\u041e\bl\u0001l\u0005l\u0421"+ - "\bl\nl\fl\u0424\tl\u0001l\u0001l\u0003l\u0428\bl\u0001l\u0004l\u042b\b"+ - 
"l\u000bl\fl\u042c\u0003l\u042f\bl\u0001m\u0001m\u0004m\u0433\bm\u000b"+ - "m\fm\u0434\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ - "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ - "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001"+ - "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ - "v\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ - "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001"+ - "~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0004\u0081\u048a\b\u0081\u000b\u0081\f\u0081\u048b\u0001\u0081"+ - "\u0001\u0081\u0003\u0081\u0490\b\u0081\u0001\u0081\u0004\u0081\u0493\b"+ - "\u0081\u000b\u0081\f\u0081\u0494\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ - "\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ - "\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001"+ - "\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ - "\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ - "\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ - "\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001"+ - "\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001"+ - "\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001"+ - "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001"+ - 
"\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ - "\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ - "\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001"+ - "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001"+ - "\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0004"+ - "\u00a2\u0524\b\u00a2\u000b\u00a2\f\u00a2\u0525\u0001\u00a3\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4"+ - "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ - "\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9"+ - "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa"+ - "\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab"+ - "\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad"+ - "\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae"+ - "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0"+ - "\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1"+ - "\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ - "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4"+ - "\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ - 
"\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7"+ - "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ - "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be"+ - "\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ - "\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1"+ - "\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2"+ - "\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ - "\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5"+ - "\u0001\u00c5\u0001\u00c5\u0002\u025f\u02a4\u0000\u00c6\u000f\u0001\u0011"+ - "\u0002\u0013\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b\u0007\u001d"+ - "\b\u001f\t!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u00123\u0013"+ - "5\u00147\u00159\u0016;\u0017=\u0018?\u0000A\u0000C\u0000E\u0000G\u0000"+ - "I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0019U\u001aW\u001bY\u001c[\u001d"+ - "]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u0085"+ - "2\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097;\u0099"+ - "<\u009b=\u009d>\u009f?\u00a1\u0000\u00a3@\u00a5A\u00a7B\u00a9C\u00ab\u0000"+ - "\u00adD\u00afE\u00b1F\u00b3G\u00b5\u0000\u00b7\u0000\u00b9H\u00bbI\u00bd"+ - "J\u00bf\u0000\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000"+ - "\u00cbK\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5M\u00d7N\u00d9"+ - "O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000"+ - 
"\u00e7\u0000\u00e9P\u00ebQ\u00edR\u00efS\u00f1\u0000\u00f3\u0000\u00f5"+ - "\u0000\u00f7\u0000\u00f9\u0000\u00fb\u0000\u00fdT\u00ff\u0000\u0101U\u0103"+ - "V\u0105W\u0107\u0000\u0109\u0000\u010bX\u010dY\u010f\u0000\u0111Z\u0113"+ - "\u0000\u0115[\u0117\\\u0119]\u011b\u0000\u011d\u0000\u011f\u0000\u0121"+ - "\u0000\u0123\u0000\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d"+ - "^\u012f_\u0131`\u0133\u0000\u0135\u0000\u0137\u0000\u0139\u0000\u013b"+ - "\u0000\u013d\u0000\u013fa\u0141b\u0143c\u0145\u0000\u0147d\u0149e\u014b"+ - "f\u014dg\u014f\u0000\u0151h\u0153i\u0155j\u0157k\u0159l\u015b\u0000\u015d"+ - "\u0000\u015f\u0000\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169"+ - "m\u016bn\u016do\u016f\u0000\u0171\u0000\u0173\u0000\u0175\u0000\u0177"+ - "p\u0179q\u017br\u017d\u0000\u017f\u0000\u0181\u0000\u0183s\u0185t\u0187"+ - "u\u0189\u0000\u018b\u0000\u018dv\u018fw\u0191x\u0193\u0000\u0195\u0000"+ - "\u0197\u0000\u0199\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ - "\u0007\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000"+ - "SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ + "\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013\u0244\b\u0013\u000b\u0013"+ + "\f\u0013\u0245\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0005\u0014\u024e\b\u0014\n\u0014\f\u0014\u0251\t\u0014\u0001"+ + "\u0014\u0003\u0014\u0254\b\u0014\u0001\u0014\u0003\u0014\u0257\b\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0005\u0015\u0260\b\u0015\n\u0015\f\u0015\u0263\t\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004"+ + "\u0016\u026b\b\u0016\u000b\u0016\f\u0016\u026c\u0001\u0016\u0001\u0016"+ + "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ + 
"\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0003\u001d"+ + "\u0282\b\u001d\u0001\u001d\u0004\u001d\u0285\b\u001d\u000b\u001d\f\u001d"+ + "\u0286\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ + " \u0003 \u0290\b \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u0297"+ + "\b\"\u0001#\u0001#\u0001#\u0005#\u029c\b#\n#\f#\u029f\t#\u0001#\u0001"+ + "#\u0001#\u0001#\u0001#\u0001#\u0005#\u02a7\b#\n#\f#\u02aa\t#\u0001#\u0001"+ + "#\u0001#\u0001#\u0001#\u0003#\u02b1\b#\u0001#\u0003#\u02b4\b#\u0003#\u02b6"+ + "\b#\u0001$\u0004$\u02b9\b$\u000b$\f$\u02ba\u0001%\u0004%\u02be\b%\u000b"+ + "%\f%\u02bf\u0001%\u0001%\u0005%\u02c4\b%\n%\f%\u02c7\t%\u0001%\u0001%"+ + "\u0004%\u02cb\b%\u000b%\f%\u02cc\u0001%\u0004%\u02d0\b%\u000b%\f%\u02d1"+ + "\u0001%\u0001%\u0005%\u02d6\b%\n%\f%\u02d9\t%\u0003%\u02db\b%\u0001%\u0001"+ + "%\u0001%\u0001%\u0004%\u02e1\b%\u000b%\f%\u02e2\u0001%\u0001%\u0003%\u02e7"+ + "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001"+ + "(\u0001(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ + "0\u00010\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u0001"+ + "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ + "7\u00017\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001"+ + "@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ + "D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ + "I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ + "K\u0003K\u036a\bK\u0001K\u0005K\u036d\bK\nK\fK\u0370\tK\u0001K\u0001K"+ + 
"\u0004K\u0374\bK\u000bK\fK\u0375\u0003K\u0378\bK\u0001L\u0001L\u0001L"+ + "\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005"+ + "N\u0386\bN\nN\fN\u0389\tN\u0001N\u0001N\u0003N\u038d\bN\u0001N\u0004N"+ + "\u0390\bN\u000bN\fN\u0391\u0003N\u0394\bN\u0001O\u0001O\u0004O\u0398\b"+ + "O\u000bO\fO\u0399\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ + "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001U\u0001V\u0001"+ + "V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ + "]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ + "_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0003`\u03e7\b`\u0001"+ + "a\u0004a\u03ea\ba\u000ba\fa\u03eb\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ + "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ + "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001"+ + "h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001"+ + "j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001"+ + "l\u0001l\u0003l\u041d\bl\u0001m\u0001m\u0003m\u0421\bm\u0001m\u0005m\u0424"+ + "\bm\nm\fm\u0427\tm\u0001m\u0001m\u0003m\u042b\bm\u0001m\u0004m\u042e\b"+ + "m\u000bm\fm\u042f\u0003m\u0432\bm\u0001n\u0001n\u0004n\u0436\bn\u000b"+ + "n\fn\u0437\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001"+ + "q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001s\u0001"+ + "s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001"+ + "u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ + "w\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ + "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001"+ + 
"}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0004\u0082\u048d\b\u0082\u000b\u0082\f"+ + "\u0082\u048e\u0001\u0082\u0001\u0082\u0003\u0082\u0493\b\u0082\u0001\u0082"+ + "\u0004\u0082\u0496\b\u0082\u000b\u0082\f\u0082\u0497\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + "\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ + "\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ + 
"\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0004\u00a3\u0529\b\u00a3\u000b"+ + "\u00a3\f\u00a3\u052a\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ + "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ + "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ + "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001"+ + "\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001"+ + "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001"+ + "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ + "\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001"+ + "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ + "\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001"+ + "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ + "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ + "\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001"+ + "\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001"+ + "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ + "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ + 
"\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ + "\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ + "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ + "\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001"+ + "\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001"+ + "\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ + "\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0002"+ + "\u0261\u02a8\u0000\u00c7\u000f\u0001\u0011\u0002\u0013\u0003\u0015\u0004"+ + "\u0017\u0005\u0019\u0006\u001b\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r"+ + ")\u000e+\u000f-\u0010/\u00111\u00123\u00135\u00147\u00159\u0016;\u0017"+ + "=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000"+ + "Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001fa c!e\"g#i$k%"+ + "m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d"+ + "6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1"+ + "\u0000\u00a3\u0000\u00a5@\u00a7A\u00a9B\u00abC\u00ad\u0000\u00afD\u00b1"+ + "E\u00b3F\u00b5G\u00b7\u0000\u00b9\u0000\u00bbH\u00bdI\u00bfJ\u00c1\u0000"+ + "\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cdK\u00cf"+ + "\u0000\u00d1L\u00d3\u0000\u00d5\u0000\u00d7M\u00d9N\u00dbO\u00dd\u0000"+ + "\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000\u00e7\u0000\u00e9\u0000"+ + "\u00ebP\u00edQ\u00efR\u00f1S\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9"+ + "\u0000\u00fb\u0000\u00fd\u0000\u00ffT\u0101\u0000\u0103U\u0105V\u0107"+ + "W\u0109\u0000\u010b\u0000\u010dX\u010fY\u0111\u0000\u0113Z\u0115\u0000"+ + "\u0117[\u0119\\\u011b]\u011d\u0000\u011f\u0000\u0121\u0000\u0123\u0000"+ + "\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d\u0000\u012f^\u0131"+ + "_\u0133`\u0135\u0000\u0137\u0000\u0139\u0000\u013b\u0000\u013d\u0000\u013f"+ + 
"\u0000\u0141a\u0143b\u0145c\u0147\u0000\u0149d\u014be\u014df\u014fg\u0151"+ + "\u0000\u0153\u0000\u0155h\u0157i\u0159j\u015bk\u015d\u0000\u015f\u0000"+ + "\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169\u0000\u016bl\u016d"+ + "m\u016fn\u0171\u0000\u0173\u0000\u0175\u0000\u0177\u0000\u0179o\u017b"+ + "p\u017dq\u017f\u0000\u0181\u0000\u0183\u0000\u0185r\u0187s\u0189t\u018b"+ + "\u0000\u018d\u0000\u018fu\u0191v\u0193w\u0195\u0000\u0197\u0000\u0199"+ + "\u0000\u019b\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SS"+ + "ss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ @@ -559,7 +570,7 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05e3\u0000\u000f\u0001\u0000\u0000\u0000"+ + "\t\n\r\r \"#,,//::<<>?\\\\||\u05e8\u0000\u000f\u0001\u0000\u0000\u0000"+ "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ @@ -570,7 +581,7 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ - 
"\u0000\u0001=\u0001\u0000\u0000\u0000\u0001S\u0001\u0000\u0000\u0000\u0001"+ + "\u0000\u0000=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001"+ "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ @@ -591,700 +602,702 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ "\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001"+ - "\u00ad\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ - "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0002"+ + "\u00ab\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ + "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001"+ "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002"+ - "\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003"+ + "\u00bd\u0001\u0000\u0000\u0000\u0002\u00bf\u0001\u0000\u0000\u0000\u0003"+ "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003"+ - "\u00cf\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ + "\u00cd\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ - "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0004"+ + "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003"+ 
"\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ - "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004"+ + "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e5\u0001\u0000\u0000\u0000\u0004"+ "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004"+ - "\u00ef\u0001\u0000\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005"+ + "\u00ef\u0001\u0000\u0000\u0000\u0004\u00f1\u0001\u0000\u0000\u0000\u0005"+ "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ - "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0006"+ + "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0005"+ "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ "\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006"+ - "\u0111\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ + "\u010f\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ - "\u0119\u0001\u0000\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007"+ + "\u0119\u0001\u0000\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0007"+ "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007"+ - "\u0131\u0001\u0000\u0000\u0000\b\u0133\u0001\u0000\u0000\u0000\b\u0135"+ + 
"\u0131\u0001\u0000\u0000\u0000\u0007\u0133\u0001\u0000\u0000\u0000\b\u0135"+ "\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001"+ "\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000"+ "\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000"+ - "\u0000\b\u0143\u0001\u0000\u0000\u0000\t\u0145\u0001\u0000\u0000\u0000"+ + "\u0000\b\u0143\u0001\u0000\u0000\u0000\b\u0145\u0001\u0000\u0000\u0000"+ "\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b"+ - "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\n\u014f\u0001"+ + "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\t\u014f\u0001"+ "\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000"+ "\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000"+ - "\u0000\n\u0159\u0001\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000"+ + "\u0000\n\u0159\u0001\u0000\u0000\u0000\n\u015b\u0001\u0000\u0000\u0000"+ "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ - "\u000b\u016d\u0001\u0000\u0000\u0000\f\u016f\u0001\u0000\u0000\u0000\f"+ - "\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ + "\u000b\u016d\u0001\u0000\u0000\u0000\u000b\u016f\u0001\u0000\u0000\u0000"+ + "\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ "\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001"+ - "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\r\u017d\u0001\u0000"+ + "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\f\u017d\u0001\u0000"+ "\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000"+ "\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000"+ - 
"\r\u0187\u0001\u0000\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000e"+ + "\r\u0187\u0001\u0000\u0000\u0000\r\u0189\u0001\u0000\u0000\u0000\u000e"+ "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ - "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000f"+ - "\u019b\u0001\u0000\u0000\u0000\u0011\u01a5\u0001\u0000\u0000\u0000\u0013"+ - "\u01ac\u0001\u0000\u0000\u0000\u0015\u01b5\u0001\u0000\u0000\u0000\u0017"+ - "\u01bc\u0001\u0000\u0000\u0000\u0019\u01c6\u0001\u0000\u0000\u0000\u001b"+ - "\u01cd\u0001\u0000\u0000\u0000\u001d\u01d4\u0001\u0000\u0000\u0000\u001f"+ - "\u01db\u0001\u0000\u0000\u0000!\u01e3\u0001\u0000\u0000\u0000#\u01ef\u0001"+ - "\u0000\u0000\u0000%\u01f8\u0001\u0000\u0000\u0000\'\u01fe\u0001\u0000"+ - "\u0000\u0000)\u0205\u0001\u0000\u0000\u0000+\u020c\u0001\u0000\u0000\u0000"+ - "-\u0214\u0001\u0000\u0000\u0000/\u021c\u0001\u0000\u0000\u00001\u022b"+ - "\u0001\u0000\u0000\u00003\u0235\u0001\u0000\u0000\u00005\u0241\u0001\u0000"+ - "\u0000\u00007\u0247\u0001\u0000\u0000\u00009\u0258\u0001\u0000\u0000\u0000"+ - ";\u0268\u0001\u0000\u0000\u0000=\u026e\u0001\u0000\u0000\u0000?\u0272"+ - "\u0001\u0000\u0000\u0000A\u0274\u0001\u0000\u0000\u0000C\u0276\u0001\u0000"+ - "\u0000\u0000E\u0279\u0001\u0000\u0000\u0000G\u027b\u0001\u0000\u0000\u0000"+ - "I\u0284\u0001\u0000\u0000\u0000K\u0286\u0001\u0000\u0000\u0000M\u028b"+ - "\u0001\u0000\u0000\u0000O\u028d\u0001\u0000\u0000\u0000Q\u0292\u0001\u0000"+ - "\u0000\u0000S\u02b1\u0001\u0000\u0000\u0000U\u02b4\u0001\u0000\u0000\u0000"+ - "W\u02e2\u0001\u0000\u0000\u0000Y\u02e4\u0001\u0000\u0000\u0000[\u02e7"+ - "\u0001\u0000\u0000\u0000]\u02eb\u0001\u0000\u0000\u0000_\u02ef\u0001\u0000"+ - "\u0000\u0000a\u02f1\u0001\u0000\u0000\u0000c\u02f4\u0001\u0000\u0000\u0000"+ - 
"e\u02f6\u0001\u0000\u0000\u0000g\u02fb\u0001\u0000\u0000\u0000i\u02fd"+ - "\u0001\u0000\u0000\u0000k\u0303\u0001\u0000\u0000\u0000m\u0309\u0001\u0000"+ - "\u0000\u0000o\u030c\u0001\u0000\u0000\u0000q\u030f\u0001\u0000\u0000\u0000"+ - "s\u0314\u0001\u0000\u0000\u0000u\u0319\u0001\u0000\u0000\u0000w\u031b"+ - "\u0001\u0000\u0000\u0000y\u031f\u0001\u0000\u0000\u0000{\u0324\u0001\u0000"+ - "\u0000\u0000}\u032a\u0001\u0000\u0000\u0000\u007f\u032d\u0001\u0000\u0000"+ - "\u0000\u0081\u032f\u0001\u0000\u0000\u0000\u0083\u0335\u0001\u0000\u0000"+ - "\u0000\u0085\u0337\u0001\u0000\u0000\u0000\u0087\u033c\u0001\u0000\u0000"+ - "\u0000\u0089\u033f\u0001\u0000\u0000\u0000\u008b\u0342\u0001\u0000\u0000"+ - "\u0000\u008d\u0345\u0001\u0000\u0000\u0000\u008f\u0347\u0001\u0000\u0000"+ - "\u0000\u0091\u034a\u0001\u0000\u0000\u0000\u0093\u034c\u0001\u0000\u0000"+ - "\u0000\u0095\u034f\u0001\u0000\u0000\u0000\u0097\u0351\u0001\u0000\u0000"+ - "\u0000\u0099\u0353\u0001\u0000\u0000\u0000\u009b\u0355\u0001\u0000\u0000"+ - "\u0000\u009d\u0357\u0001\u0000\u0000\u0000\u009f\u0359\u0001\u0000\u0000"+ - "\u0000\u00a1\u035f\u0001\u0000\u0000\u0000\u00a3\u0374\u0001\u0000\u0000"+ - "\u0000\u00a5\u0376\u0001\u0000\u0000\u0000\u00a7\u037b\u0001\u0000\u0000"+ - "\u0000\u00a9\u0390\u0001\u0000\u0000\u0000\u00ab\u0392\u0001\u0000\u0000"+ - "\u0000\u00ad\u039a\u0001\u0000\u0000\u0000\u00af\u039c\u0001\u0000\u0000"+ - "\u0000\u00b1\u03a0\u0001\u0000\u0000\u0000\u00b3\u03a4\u0001\u0000\u0000"+ - "\u0000\u00b5\u03a8\u0001\u0000\u0000\u0000\u00b7\u03ad\u0001\u0000\u0000"+ - "\u0000\u00b9\u03b2\u0001\u0000\u0000\u0000\u00bb\u03b6\u0001\u0000\u0000"+ - "\u0000\u00bd\u03ba\u0001\u0000\u0000\u0000\u00bf\u03be\u0001\u0000\u0000"+ - "\u0000\u00c1\u03c3\u0001\u0000\u0000\u0000\u00c3\u03c7\u0001\u0000\u0000"+ - "\u0000\u00c5\u03cb\u0001\u0000\u0000\u0000\u00c7\u03cf\u0001\u0000\u0000"+ - "\u0000\u00c9\u03d3\u0001\u0000\u0000\u0000\u00cb\u03d7\u0001\u0000\u0000"+ - 
"\u0000\u00cd\u03e3\u0001\u0000\u0000\u0000\u00cf\u03e6\u0001\u0000\u0000"+ - "\u0000\u00d1\u03ea\u0001\u0000\u0000\u0000\u00d3\u03ee\u0001\u0000\u0000"+ - "\u0000\u00d5\u03f2\u0001\u0000\u0000\u0000\u00d7\u03f6\u0001\u0000\u0000"+ - "\u0000\u00d9\u03fa\u0001\u0000\u0000\u0000\u00db\u03fe\u0001\u0000\u0000"+ - "\u0000\u00dd\u0403\u0001\u0000\u0000\u0000\u00df\u0407\u0001\u0000\u0000"+ - "\u0000\u00e1\u040b\u0001\u0000\u0000\u0000\u00e3\u0410\u0001\u0000\u0000"+ - "\u0000\u00e5\u0419\u0001\u0000\u0000\u0000\u00e7\u042e\u0001\u0000\u0000"+ - "\u0000\u00e9\u0432\u0001\u0000\u0000\u0000\u00eb\u0436\u0001\u0000\u0000"+ - "\u0000\u00ed\u043a\u0001\u0000\u0000\u0000\u00ef\u043e\u0001\u0000\u0000"+ - "\u0000\u00f1\u0442\u0001\u0000\u0000\u0000\u00f3\u0447\u0001\u0000\u0000"+ - "\u0000\u00f5\u044b\u0001\u0000\u0000\u0000\u00f7\u044f\u0001\u0000\u0000"+ - "\u0000\u00f9\u0453\u0001\u0000\u0000\u0000\u00fb\u0458\u0001\u0000\u0000"+ - "\u0000\u00fd\u045d\u0001\u0000\u0000\u0000\u00ff\u0460\u0001\u0000\u0000"+ - "\u0000\u0101\u0464\u0001\u0000\u0000\u0000\u0103\u0468\u0001\u0000\u0000"+ - "\u0000\u0105\u046c\u0001\u0000\u0000\u0000\u0107\u0470\u0001\u0000\u0000"+ - "\u0000\u0109\u0475\u0001\u0000\u0000\u0000\u010b\u047a\u0001\u0000\u0000"+ - "\u0000\u010d\u047f\u0001\u0000\u0000\u0000\u010f\u0486\u0001\u0000\u0000"+ - "\u0000\u0111\u048f\u0001\u0000\u0000\u0000\u0113\u0496\u0001\u0000\u0000"+ - "\u0000\u0115\u049a\u0001\u0000\u0000\u0000\u0117\u049e\u0001\u0000\u0000"+ - "\u0000\u0119\u04a2\u0001\u0000\u0000\u0000\u011b\u04a6\u0001\u0000\u0000"+ - "\u0000\u011d\u04ac\u0001\u0000\u0000\u0000\u011f\u04b0\u0001\u0000\u0000"+ - "\u0000\u0121\u04b4\u0001\u0000\u0000\u0000\u0123\u04b8\u0001\u0000\u0000"+ - "\u0000\u0125\u04bc\u0001\u0000\u0000\u0000\u0127\u04c0\u0001\u0000\u0000"+ - "\u0000\u0129\u04c4\u0001\u0000\u0000\u0000\u012b\u04c9\u0001\u0000\u0000"+ - "\u0000\u012d\u04ce\u0001\u0000\u0000\u0000\u012f\u04d2\u0001\u0000\u0000"+ - 
"\u0000\u0131\u04d6\u0001\u0000\u0000\u0000\u0133\u04da\u0001\u0000\u0000"+ - "\u0000\u0135\u04df\u0001\u0000\u0000\u0000\u0137\u04e3\u0001\u0000\u0000"+ - "\u0000\u0139\u04e8\u0001\u0000\u0000\u0000\u013b\u04ed\u0001\u0000\u0000"+ - "\u0000\u013d\u04f1\u0001\u0000\u0000\u0000\u013f\u04f5\u0001\u0000\u0000"+ - "\u0000\u0141\u04f9\u0001\u0000\u0000\u0000\u0143\u04fd\u0001\u0000\u0000"+ - "\u0000\u0145\u0501\u0001\u0000\u0000\u0000\u0147\u0506\u0001\u0000\u0000"+ - "\u0000\u0149\u050b\u0001\u0000\u0000\u0000\u014b\u050f\u0001\u0000\u0000"+ - "\u0000\u014d\u0513\u0001\u0000\u0000\u0000\u014f\u0517\u0001\u0000\u0000"+ - "\u0000\u0151\u051c\u0001\u0000\u0000\u0000\u0153\u0523\u0001\u0000\u0000"+ - "\u0000\u0155\u0527\u0001\u0000\u0000\u0000\u0157\u052b\u0001\u0000\u0000"+ - "\u0000\u0159\u052f\u0001\u0000\u0000\u0000\u015b\u0533\u0001\u0000\u0000"+ - "\u0000\u015d\u0538\u0001\u0000\u0000\u0000\u015f\u053c\u0001\u0000\u0000"+ - "\u0000\u0161\u0540\u0001\u0000\u0000\u0000\u0163\u0544\u0001\u0000\u0000"+ - "\u0000\u0165\u0549\u0001\u0000\u0000\u0000\u0167\u054d\u0001\u0000\u0000"+ - "\u0000\u0169\u0551\u0001\u0000\u0000\u0000\u016b\u0555\u0001\u0000\u0000"+ - "\u0000\u016d\u0559\u0001\u0000\u0000\u0000\u016f\u055d\u0001\u0000\u0000"+ - "\u0000\u0171\u0563\u0001\u0000\u0000\u0000\u0173\u0567\u0001\u0000\u0000"+ - "\u0000\u0175\u056b\u0001\u0000\u0000\u0000\u0177\u056f\u0001\u0000\u0000"+ - "\u0000\u0179\u0573\u0001\u0000\u0000\u0000\u017b\u0577\u0001\u0000\u0000"+ - "\u0000\u017d\u057b\u0001\u0000\u0000\u0000\u017f\u0580\u0001\u0000\u0000"+ - "\u0000\u0181\u0586\u0001\u0000\u0000\u0000\u0183\u058c\u0001\u0000\u0000"+ - "\u0000\u0185\u0590\u0001\u0000\u0000\u0000\u0187\u0594\u0001\u0000\u0000"+ - "\u0000\u0189\u0598\u0001\u0000\u0000\u0000\u018b\u059e\u0001\u0000\u0000"+ - "\u0000\u018d\u05a4\u0001\u0000\u0000\u0000\u018f\u05a8\u0001\u0000\u0000"+ - "\u0000\u0191\u05ac\u0001\u0000\u0000\u0000\u0193\u05b0\u0001\u0000\u0000"+ - 
"\u0000\u0195\u05b6\u0001\u0000\u0000\u0000\u0197\u05bc\u0001\u0000\u0000"+ - "\u0000\u0199\u05c2\u0001\u0000\u0000\u0000\u019b\u019c\u0007\u0000\u0000"+ - "\u0000\u019c\u019d\u0007\u0001\u0000\u0000\u019d\u019e\u0007\u0002\u0000"+ - "\u0000\u019e\u019f\u0007\u0002\u0000\u0000\u019f\u01a0\u0007\u0003\u0000"+ - "\u0000\u01a0\u01a1\u0007\u0004\u0000\u0000\u01a1\u01a2\u0007\u0005\u0000"+ - "\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4\u0006\u0000\u0000"+ - "\u0000\u01a4\u0010\u0001\u0000\u0000\u0000\u01a5\u01a6\u0007\u0000\u0000"+ - "\u0000\u01a6\u01a7\u0007\u0006\u0000\u0000\u01a7\u01a8\u0007\u0007\u0000"+ - "\u0000\u01a8\u01a9\u0007\b\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000"+ - "\u01aa\u01ab\u0006\u0001\u0001\u0000\u01ab\u0012\u0001\u0000\u0000\u0000"+ - "\u01ac\u01ad\u0007\u0003\u0000\u0000\u01ad\u01ae\u0007\t\u0000\u0000\u01ae"+ - "\u01af\u0007\u0006\u0000\u0000\u01af\u01b0\u0007\u0001\u0000\u0000\u01b0"+ - "\u01b1\u0007\u0004\u0000\u0000\u01b1\u01b2\u0007\n\u0000\u0000\u01b2\u01b3"+ - "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0002\u0002\u0000\u01b4\u0014"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0007\u0003\u0000\u0000\u01b6\u01b7"+ - "\u0007\u000b\u0000\u0000\u01b7\u01b8\u0007\f\u0000\u0000\u01b8\u01b9\u0007"+ - "\r\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bb\u0006\u0003"+ - "\u0000\u0000\u01bb\u0016\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007\u0003"+ - "\u0000\u0000\u01bd\u01be\u0007\u000e\u0000\u0000\u01be\u01bf\u0007\b\u0000"+ - "\u0000\u01bf\u01c0\u0007\r\u0000\u0000\u01c0\u01c1\u0007\f\u0000\u0000"+ - "\u01c1\u01c2\u0007\u0001\u0000\u0000\u01c2\u01c3\u0007\t\u0000\u0000\u01c3"+ - "\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006\u0004\u0003\u0000\u01c5"+ - "\u0018\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007\u000f\u0000\u0000\u01c7"+ - "\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007\u0007\u0000\u0000\u01c9"+ - "\u01ca\u0007\u0010\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb"+ - 
"\u01cc\u0006\u0005\u0004\u0000\u01cc\u001a\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0007\u0011\u0000\u0000\u01ce\u01cf\u0007\u0006\u0000\u0000\u01cf"+ - "\u01d0\u0007\u0007\u0000\u0000\u01d0\u01d1\u0007\u0012\u0000\u0000\u01d1"+ - "\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0006\u0000\u0000\u01d3"+ - "\u001c\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0012\u0000\u0000\u01d5"+ - "\u01d6\u0007\u0003\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000\u01d7"+ - "\u01d8\u0007\b\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da"+ - "\u0006\u0007\u0001\u0000\u01da\u001e\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0007\r\u0000\u0000\u01dc\u01dd\u0007\u0001\u0000\u0000\u01dd\u01de\u0007"+ - "\u0010\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ - "\u0005\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0006"+ - "\b\u0000\u0000\u01e2 \u0001\u0000\u0000\u0000\u01e3\u01e4\u0007\u0010"+ - "\u0000\u0000\u01e4\u01e5\u0007\u000b\u0000\u0000\u01e5\u01e6\u0005_\u0000"+ - "\u0000\u01e6\u01e7\u0007\u0003\u0000\u0000\u01e7\u01e8\u0007\u000e\u0000"+ - "\u0000\u01e8\u01e9\u0007\b\u0000\u0000\u01e9\u01ea\u0007\f\u0000\u0000"+ - "\u01ea\u01eb\u0007\t\u0000\u0000\u01eb\u01ec\u0007\u0000\u0000\u0000\u01ec"+ - "\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\t\u0005\u0000\u01ee\""+ - "\u0001\u0000\u0000\u0000\u01ef\u01f0\u0007\u0006\u0000\u0000\u01f0\u01f1"+ - "\u0007\u0003\u0000\u0000\u01f1\u01f2\u0007\t\u0000\u0000\u01f2\u01f3\u0007"+ - "\f\u0000\u0000\u01f3\u01f4\u0007\u0010\u0000\u0000\u01f4\u01f5\u0007\u0003"+ - "\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\n\u0006"+ - "\u0000\u01f7$\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0006\u0000\u0000"+ - "\u01f9\u01fa\u0007\u0007\u0000\u0000\u01fa\u01fb\u0007\u0013\u0000\u0000"+ - "\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc\u01fd\u0006\u000b\u0000\u0000"+ - "\u01fd&\u0001\u0000\u0000\u0000\u01fe\u01ff\u0007\u0002\u0000\u0000\u01ff"+ - 
"\u0200\u0007\n\u0000\u0000\u0200\u0201\u0007\u0007\u0000\u0000\u0201\u0202"+ - "\u0007\u0013\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204"+ - "\u0006\f\u0007\u0000\u0204(\u0001\u0000\u0000\u0000\u0205\u0206\u0007"+ - "\u0002\u0000\u0000\u0206\u0207\u0007\u0007\u0000\u0000\u0207\u0208\u0007"+ - "\u0006\u0000\u0000\u0208\u0209\u0007\u0005\u0000\u0000\u0209\u020a\u0001"+ - "\u0000\u0000\u0000\u020a\u020b\u0006\r\u0000\u0000\u020b*\u0001\u0000"+ - "\u0000\u0000\u020c\u020d\u0007\u0002\u0000\u0000\u020d\u020e\u0007\u0005"+ - "\u0000\u0000\u020e\u020f\u0007\f\u0000\u0000\u020f\u0210\u0007\u0005\u0000"+ - "\u0000\u0210\u0211\u0007\u0002\u0000\u0000\u0211\u0212\u0001\u0000\u0000"+ - "\u0000\u0212\u0213\u0006\u000e\u0000\u0000\u0213,\u0001\u0000\u0000\u0000"+ - "\u0214\u0215\u0007\u0013\u0000\u0000\u0215\u0216\u0007\n\u0000\u0000\u0216"+ - "\u0217\u0007\u0003\u0000\u0000\u0217\u0218\u0007\u0006\u0000\u0000\u0218"+ - "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a"+ - "\u021b\u0006\u000f\u0000\u0000\u021b.\u0001\u0000\u0000\u0000\u021c\u021d"+ - "\u0004\u0010\u0000\u0000\u021d\u021e\u0007\u0001\u0000\u0000\u021e\u021f"+ - "\u0007\t\u0000\u0000\u021f\u0220\u0007\r\u0000\u0000\u0220\u0221\u0007"+ - "\u0001\u0000\u0000\u0221\u0222\u0007\t\u0000\u0000\u0222\u0223\u0007\u0003"+ - "\u0000\u0000\u0223\u0224\u0007\u0002\u0000\u0000\u0224\u0225\u0007\u0005"+ - "\u0000\u0000\u0225\u0226\u0007\f\u0000\u0000\u0226\u0227\u0007\u0005\u0000"+ - "\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0001\u0000\u0000"+ - "\u0000\u0229\u022a\u0006\u0010\u0000\u0000\u022a0\u0001\u0000\u0000\u0000"+ - "\u022b\u022c\u0004\u0011\u0001\u0000\u022c\u022d\u0007\r\u0000\u0000\u022d"+ - "\u022e\u0007\u0007\u0000\u0000\u022e\u022f\u0007\u0007\u0000\u0000\u022f"+ - "\u0230\u0007\u0012\u0000\u0000\u0230\u0231\u0007\u0014\u0000\u0000\u0231"+ - "\u0232\u0007\b\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234"+ - 
"\u0006\u0011\b\u0000\u02342\u0001\u0000\u0000\u0000\u0235\u0236\u0004"+ - "\u0012\u0002\u0000\u0236\u0237\u0007\u0010\u0000\u0000\u0237\u0238\u0007"+ - "\u0003\u0000\u0000\u0238\u0239\u0007\u0005\u0000\u0000\u0239\u023a\u0007"+ - "\u0006\u0000\u0000\u023a\u023b\u0007\u0001\u0000\u0000\u023b\u023c\u0007"+ - "\u0004\u0000\u0000\u023c\u023d\u0007\u0002\u0000\u0000\u023d\u023e\u0001"+ - "\u0000\u0000\u0000\u023e\u023f\u0006\u0012\t\u0000\u023f4\u0001\u0000"+ - "\u0000\u0000\u0240\u0242\b\u0015\u0000\u0000\u0241\u0240\u0001\u0000\u0000"+ - "\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0241\u0001\u0000\u0000"+ - "\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000"+ - "\u0000\u0245\u0246\u0006\u0013\u0000\u0000\u02466\u0001\u0000\u0000\u0000"+ - "\u0247\u0248\u0005/\u0000\u0000\u0248\u0249\u0005/\u0000\u0000\u0249\u024d"+ - "\u0001\u0000\u0000\u0000\u024a\u024c\b\u0016\u0000\u0000\u024b\u024a\u0001"+ - "\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d\u024b\u0001"+ - "\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u0251\u0001"+ - "\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250\u0252\u0005"+ - "\r\u0000\u0000\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252\u0001\u0000"+ - "\u0000\u0000\u0252\u0254\u0001\u0000\u0000\u0000\u0253\u0255\u0005\n\u0000"+ - "\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ - "\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006\u0014\n\u0000"+ - "\u02578\u0001\u0000\u0000\u0000\u0258\u0259\u0005/\u0000\u0000\u0259\u025a"+ - "\u0005*\u0000\u0000\u025a\u025f\u0001\u0000\u0000\u0000\u025b\u025e\u0003"+ - "9\u0015\u0000\u025c\u025e\t\u0000\u0000\u0000\u025d\u025b\u0001\u0000"+ - "\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025e\u0261\u0001\u0000"+ - "\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ - "\u0000\u0000\u0260\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ - 
"\u0000\u0000\u0262\u0263\u0005*\u0000\u0000\u0263\u0264\u0005/\u0000\u0000"+ - "\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0006\u0015\n\u0000\u0266"+ - ":\u0001\u0000\u0000\u0000\u0267\u0269\u0007\u0017\u0000\u0000\u0268\u0267"+ - "\u0001\u0000\u0000\u0000\u0269\u026a\u0001\u0000\u0000\u0000\u026a\u0268"+ - "\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026c"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0006\u0016\n\u0000\u026d<\u0001"+ - "\u0000\u0000\u0000\u026e\u026f\u0005|\u0000\u0000\u026f\u0270\u0001\u0000"+ - "\u0000\u0000\u0270\u0271\u0006\u0017\u000b\u0000\u0271>\u0001\u0000\u0000"+ - "\u0000\u0272\u0273\u0007\u0018\u0000\u0000\u0273@\u0001\u0000\u0000\u0000"+ - "\u0274\u0275\u0007\u0019\u0000\u0000\u0275B\u0001\u0000\u0000\u0000\u0276"+ - "\u0277\u0005\\\u0000\u0000\u0277\u0278\u0007\u001a\u0000\u0000\u0278D"+ - "\u0001\u0000\u0000\u0000\u0279\u027a\b\u001b\u0000\u0000\u027aF\u0001"+ - "\u0000\u0000\u0000\u027b\u027d\u0007\u0003\u0000\u0000\u027c\u027e\u0007"+ - "\u001c\u0000\u0000\u027d\u027c\u0001\u0000\u0000\u0000\u027d\u027e\u0001"+ - "\u0000\u0000\u0000\u027e\u0280\u0001\u0000\u0000\u0000\u027f\u0281\u0003"+ - "?\u0018\u0000\u0280\u027f\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000"+ - "\u0000\u0000\u0282\u0280\u0001\u0000\u0000\u0000\u0282\u0283\u0001\u0000"+ - "\u0000\u0000\u0283H\u0001\u0000\u0000\u0000\u0284\u0285\u0005@\u0000\u0000"+ - "\u0285J\u0001\u0000\u0000\u0000\u0286\u0287\u0005`\u0000\u0000\u0287L"+ - "\u0001\u0000\u0000\u0000\u0288\u028c\b\u001d\u0000\u0000\u0289\u028a\u0005"+ - "`\u0000\u0000\u028a\u028c\u0005`\u0000\u0000\u028b\u0288\u0001\u0000\u0000"+ - "\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028cN\u0001\u0000\u0000\u0000"+ - "\u028d\u028e\u0005_\u0000\u0000\u028eP\u0001\u0000\u0000\u0000\u028f\u0293"+ - "\u0003A\u0019\u0000\u0290\u0293\u0003?\u0018\u0000\u0291\u0293\u0003O"+ - " \u0000\u0292\u028f\u0001\u0000\u0000\u0000\u0292\u0290\u0001\u0000\u0000"+ - 
"\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293R\u0001\u0000\u0000\u0000"+ - "\u0294\u0299\u0005\"\u0000\u0000\u0295\u0298\u0003C\u001a\u0000\u0296"+ - "\u0298\u0003E\u001b\u0000\u0297\u0295\u0001\u0000\u0000\u0000\u0297\u0296"+ - "\u0001\u0000\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297"+ - "\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029c"+ - "\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u02b2"+ - "\u0005\"\u0000\u0000\u029d\u029e\u0005\"\u0000\u0000\u029e\u029f\u0005"+ - "\"\u0000\u0000\u029f\u02a0\u0005\"\u0000\u0000\u02a0\u02a4\u0001\u0000"+ - "\u0000\u0000\u02a1\u02a3\b\u0016\u0000\u0000\u02a2\u02a1\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a6\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001\u0000\u0000"+ - "\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02a7\u0001\u0000\u0000"+ - "\u0000\u02a6\u02a4\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005\"\u0000\u0000"+ - "\u02a8\u02a9\u0005\"\u0000\u0000\u02a9\u02aa\u0005\"\u0000\u0000\u02aa"+ - "\u02ac\u0001\u0000\u0000\u0000\u02ab\u02ad\u0005\"\u0000\u0000\u02ac\u02ab"+ - "\u0001\u0000\u0000\u0000\u02ac\u02ad\u0001\u0000\u0000\u0000\u02ad\u02af"+ - "\u0001\u0000\u0000\u0000\u02ae\u02b0\u0005\"\u0000\u0000\u02af\u02ae\u0001"+ - "\u0000\u0000\u0000\u02af\u02b0\u0001\u0000\u0000\u0000\u02b0\u02b2\u0001"+ - "\u0000\u0000\u0000\u02b1\u0294\u0001\u0000\u0000\u0000\u02b1\u029d\u0001"+ - "\u0000\u0000\u0000\u02b2T\u0001\u0000\u0000\u0000\u02b3\u02b5\u0003?\u0018"+ - "\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000"+ - "\u0000\u02b7V\u0001\u0000\u0000\u0000\u02b8\u02ba\u0003?\u0018\u0000\u02b9"+ + "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000e"+ + "\u019b\u0001\u0000\u0000\u0000\u000f\u019d\u0001\u0000\u0000\u0000\u0011"+ + "\u01a7\u0001\u0000\u0000\u0000\u0013\u01ae\u0001\u0000\u0000\u0000\u0015"+ + 
"\u01b7\u0001\u0000\u0000\u0000\u0017\u01be\u0001\u0000\u0000\u0000\u0019"+ + "\u01c8\u0001\u0000\u0000\u0000\u001b\u01cf\u0001\u0000\u0000\u0000\u001d"+ + "\u01d6\u0001\u0000\u0000\u0000\u001f\u01dd\u0001\u0000\u0000\u0000!\u01e5"+ + "\u0001\u0000\u0000\u0000#\u01f1\u0001\u0000\u0000\u0000%\u01fa\u0001\u0000"+ + "\u0000\u0000\'\u0200\u0001\u0000\u0000\u0000)\u0207\u0001\u0000\u0000"+ + "\u0000+\u020e\u0001\u0000\u0000\u0000-\u0216\u0001\u0000\u0000\u0000/"+ + "\u021e\u0001\u0000\u0000\u00001\u022d\u0001\u0000\u0000\u00003\u0237\u0001"+ + "\u0000\u0000\u00005\u0243\u0001\u0000\u0000\u00007\u0249\u0001\u0000\u0000"+ + "\u00009\u025a\u0001\u0000\u0000\u0000;\u026a\u0001\u0000\u0000\u0000="+ + "\u0270\u0001\u0000\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0276\u0001"+ + "\u0000\u0000\u0000C\u0278\u0001\u0000\u0000\u0000E\u027a\u0001\u0000\u0000"+ + "\u0000G\u027d\u0001\u0000\u0000\u0000I\u027f\u0001\u0000\u0000\u0000K"+ + "\u0288\u0001\u0000\u0000\u0000M\u028a\u0001\u0000\u0000\u0000O\u028f\u0001"+ + "\u0000\u0000\u0000Q\u0291\u0001\u0000\u0000\u0000S\u0296\u0001\u0000\u0000"+ + "\u0000U\u02b5\u0001\u0000\u0000\u0000W\u02b8\u0001\u0000\u0000\u0000Y"+ + "\u02e6\u0001\u0000\u0000\u0000[\u02e8\u0001\u0000\u0000\u0000]\u02eb\u0001"+ + "\u0000\u0000\u0000_\u02ef\u0001\u0000\u0000\u0000a\u02f3\u0001\u0000\u0000"+ + "\u0000c\u02f5\u0001\u0000\u0000\u0000e\u02f8\u0001\u0000\u0000\u0000g"+ + "\u02fa\u0001\u0000\u0000\u0000i\u02ff\u0001\u0000\u0000\u0000k\u0301\u0001"+ + "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030d\u0001\u0000\u0000"+ + "\u0000q\u0310\u0001\u0000\u0000\u0000s\u0313\u0001\u0000\u0000\u0000u"+ + "\u0318\u0001\u0000\u0000\u0000w\u031d\u0001\u0000\u0000\u0000y\u031f\u0001"+ + "\u0000\u0000\u0000{\u0323\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ + "\u0000\u007f\u032e\u0001\u0000\u0000\u0000\u0081\u0331\u0001\u0000\u0000"+ + "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0339\u0001\u0000\u0000"+ + 
"\u0000\u0087\u033b\u0001\u0000\u0000\u0000\u0089\u0340\u0001\u0000\u0000"+ + "\u0000\u008b\u0343\u0001\u0000\u0000\u0000\u008d\u0346\u0001\u0000\u0000"+ + "\u0000\u008f\u0349\u0001\u0000\u0000\u0000\u0091\u034b\u0001\u0000\u0000"+ + "\u0000\u0093\u034e\u0001\u0000\u0000\u0000\u0095\u0350\u0001\u0000\u0000"+ + "\u0000\u0097\u0353\u0001\u0000\u0000\u0000\u0099\u0355\u0001\u0000\u0000"+ + "\u0000\u009b\u0357\u0001\u0000\u0000\u0000\u009d\u0359\u0001\u0000\u0000"+ + "\u0000\u009f\u035b\u0001\u0000\u0000\u0000\u00a1\u035d\u0001\u0000\u0000"+ + "\u0000\u00a3\u0362\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000"+ + "\u0000\u00a7\u0379\u0001\u0000\u0000\u0000\u00a9\u037e\u0001\u0000\u0000"+ + "\u0000\u00ab\u0393\u0001\u0000\u0000\u0000\u00ad\u0395\u0001\u0000\u0000"+ + "\u0000\u00af\u039d\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ + "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ + "\u0000\u00b7\u03ab\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ + "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ + "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ + "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ + "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ + "\u0000\u00cb\u03d6\u0001\u0000\u0000\u0000\u00cd\u03da\u0001\u0000\u0000"+ + "\u0000\u00cf\u03e6\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ + "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ + "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ + "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ + "\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ + "\u0000\u00e3\u040e\u0001\u0000\u0000\u0000\u00e5\u0413\u0001\u0000\u0000"+ + "\u0000\u00e7\u041c\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000"+ + 
"\u0000\u00eb\u0435\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000"+ + "\u0000\u00ef\u043d\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000"+ + "\u0000\u00f3\u0445\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000"+ + "\u0000\u00f7\u044e\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000"+ + "\u0000\u00fb\u0456\u0001\u0000\u0000\u0000\u00fd\u045b\u0001\u0000\u0000"+ + "\u0000\u00ff\u0460\u0001\u0000\u0000\u0000\u0101\u0463\u0001\u0000\u0000"+ + "\u0000\u0103\u0467\u0001\u0000\u0000\u0000\u0105\u046b\u0001\u0000\u0000"+ + "\u0000\u0107\u046f\u0001\u0000\u0000\u0000\u0109\u0473\u0001\u0000\u0000"+ + "\u0000\u010b\u0478\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ + "\u0000\u010f\u0482\u0001\u0000\u0000\u0000\u0111\u0489\u0001\u0000\u0000"+ + "\u0000\u0113\u0492\u0001\u0000\u0000\u0000\u0115\u0499\u0001\u0000\u0000"+ + "\u0000\u0117\u049d\u0001\u0000\u0000\u0000\u0119\u04a1\u0001\u0000\u0000"+ + "\u0000\u011b\u04a5\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000"+ + "\u0000\u011f\u04af\u0001\u0000\u0000\u0000\u0121\u04b3\u0001\u0000\u0000"+ + "\u0000\u0123\u04b7\u0001\u0000\u0000\u0000\u0125\u04bb\u0001\u0000\u0000"+ + "\u0000\u0127\u04bf\u0001\u0000\u0000\u0000\u0129\u04c3\u0001\u0000\u0000"+ + "\u0000\u012b\u04c7\u0001\u0000\u0000\u0000\u012d\u04cc\u0001\u0000\u0000"+ + "\u0000\u012f\u04d1\u0001\u0000\u0000\u0000\u0131\u04d5\u0001\u0000\u0000"+ + "\u0000\u0133\u04d9\u0001\u0000\u0000\u0000\u0135\u04dd\u0001\u0000\u0000"+ + "\u0000\u0137\u04e2\u0001\u0000\u0000\u0000\u0139\u04e6\u0001\u0000\u0000"+ + "\u0000\u013b\u04eb\u0001\u0000\u0000\u0000\u013d\u04f0\u0001\u0000\u0000"+ + "\u0000\u013f\u04f4\u0001\u0000\u0000\u0000\u0141\u04f8\u0001\u0000\u0000"+ + "\u0000\u0143\u04fc\u0001\u0000\u0000\u0000\u0145\u0500\u0001\u0000\u0000"+ + "\u0000\u0147\u0504\u0001\u0000\u0000\u0000\u0149\u0509\u0001\u0000\u0000"+ + "\u0000\u014b\u050e\u0001\u0000\u0000\u0000\u014d\u0512\u0001\u0000\u0000"+ + 
"\u0000\u014f\u0516\u0001\u0000\u0000\u0000\u0151\u051a\u0001\u0000\u0000"+ + "\u0000\u0153\u051f\u0001\u0000\u0000\u0000\u0155\u0528\u0001\u0000\u0000"+ + "\u0000\u0157\u052c\u0001\u0000\u0000\u0000\u0159\u0530\u0001\u0000\u0000"+ + "\u0000\u015b\u0534\u0001\u0000\u0000\u0000\u015d\u0538\u0001\u0000\u0000"+ + "\u0000\u015f\u053d\u0001\u0000\u0000\u0000\u0161\u0541\u0001\u0000\u0000"+ + "\u0000\u0163\u0545\u0001\u0000\u0000\u0000\u0165\u0549\u0001\u0000\u0000"+ + "\u0000\u0167\u054e\u0001\u0000\u0000\u0000\u0169\u0552\u0001\u0000\u0000"+ + "\u0000\u016b\u0556\u0001\u0000\u0000\u0000\u016d\u055a\u0001\u0000\u0000"+ + "\u0000\u016f\u055e\u0001\u0000\u0000\u0000\u0171\u0562\u0001\u0000\u0000"+ + "\u0000\u0173\u0568\u0001\u0000\u0000\u0000\u0175\u056c\u0001\u0000\u0000"+ + "\u0000\u0177\u0570\u0001\u0000\u0000\u0000\u0179\u0574\u0001\u0000\u0000"+ + "\u0000\u017b\u0578\u0001\u0000\u0000\u0000\u017d\u057c\u0001\u0000\u0000"+ + "\u0000\u017f\u0580\u0001\u0000\u0000\u0000\u0181\u0585\u0001\u0000\u0000"+ + "\u0000\u0183\u058b\u0001\u0000\u0000\u0000\u0185\u0591\u0001\u0000\u0000"+ + "\u0000\u0187\u0595\u0001\u0000\u0000\u0000\u0189\u0599\u0001\u0000\u0000"+ + "\u0000\u018b\u059d\u0001\u0000\u0000\u0000\u018d\u05a3\u0001\u0000\u0000"+ + "\u0000\u018f\u05a9\u0001\u0000\u0000\u0000\u0191\u05ad\u0001\u0000\u0000"+ + "\u0000\u0193\u05b1\u0001\u0000\u0000\u0000\u0195\u05b5\u0001\u0000\u0000"+ + "\u0000\u0197\u05bb\u0001\u0000\u0000\u0000\u0199\u05c1\u0001\u0000\u0000"+ + "\u0000\u019b\u05c7\u0001\u0000\u0000\u0000\u019d\u019e\u0007\u0000\u0000"+ + "\u0000\u019e\u019f\u0007\u0001\u0000\u0000\u019f\u01a0\u0007\u0002\u0000"+ + "\u0000\u01a0\u01a1\u0007\u0002\u0000\u0000\u01a1\u01a2\u0007\u0003\u0000"+ + "\u0000\u01a2\u01a3\u0007\u0004\u0000\u0000\u01a3\u01a4\u0007\u0005\u0000"+ + "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6\u0006\u0000\u0000"+ + "\u0000\u01a6\u0010\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0000\u0000"+ + 
"\u0000\u01a8\u01a9\u0007\u0006\u0000\u0000\u01a9\u01aa\u0007\u0007\u0000"+ + "\u0000\u01aa\u01ab\u0007\b\u0000\u0000\u01ab\u01ac\u0001\u0000\u0000\u0000"+ + "\u01ac\u01ad\u0006\u0001\u0001\u0000\u01ad\u0012\u0001\u0000\u0000\u0000"+ + "\u01ae\u01af\u0007\u0003\u0000\u0000\u01af\u01b0\u0007\t\u0000\u0000\u01b0"+ + "\u01b1\u0007\u0006\u0000\u0000\u01b1\u01b2\u0007\u0001\u0000\u0000\u01b2"+ + "\u01b3\u0007\u0004\u0000\u0000\u01b3\u01b4\u0007\n\u0000\u0000\u01b4\u01b5"+ + "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0006\u0002\u0002\u0000\u01b6\u0014"+ + "\u0001\u0000\u0000\u0000\u01b7\u01b8\u0007\u0003\u0000\u0000\u01b8\u01b9"+ + "\u0007\u000b\u0000\u0000\u01b9\u01ba\u0007\f\u0000\u0000\u01ba\u01bb\u0007"+ + "\r\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\u0003"+ + "\u0000\u0000\u01bd\u0016\u0001\u0000\u0000\u0000\u01be\u01bf\u0007\u0003"+ + "\u0000\u0000\u01bf\u01c0\u0007\u000e\u0000\u0000\u01c0\u01c1\u0007\b\u0000"+ + "\u0000\u01c1\u01c2\u0007\r\u0000\u0000\u01c2\u01c3\u0007\f\u0000\u0000"+ + "\u01c3\u01c4\u0007\u0001\u0000\u0000\u01c4\u01c5\u0007\t\u0000\u0000\u01c5"+ + "\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u0004\u0003\u0000\u01c7"+ + "\u0018\u0001\u0000\u0000\u0000\u01c8\u01c9\u0007\u000f\u0000\u0000\u01c9"+ + "\u01ca\u0007\u0006\u0000\u0000\u01ca\u01cb\u0007\u0007\u0000\u0000\u01cb"+ + "\u01cc\u0007\u0010\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd"+ + "\u01ce\u0006\u0005\u0004\u0000\u01ce\u001a\u0001\u0000\u0000\u0000\u01cf"+ + "\u01d0\u0007\u0011\u0000\u0000\u01d0\u01d1\u0007\u0006\u0000\u0000\u01d1"+ + "\u01d2\u0007\u0007\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ + "\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0006\u0000\u0000\u01d5"+ + "\u001c\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0012\u0000\u0000\u01d7"+ + "\u01d8\u0007\u0003\u0000\u0000\u01d8\u01d9\u0007\u0003\u0000\u0000\u01d9"+ + "\u01da\u0007\b\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ + 
"\u0006\u0007\u0001\u0000\u01dc\u001e\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0007\r\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ + "\u0010\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2\u0007"+ + "\u0005\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ + "\b\u0000\u0000\u01e4 \u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0010"+ + "\u0000\u0000\u01e6\u01e7\u0007\u000b\u0000\u0000\u01e7\u01e8\u0005_\u0000"+ + "\u0000\u01e8\u01e9\u0007\u0003\u0000\u0000\u01e9\u01ea\u0007\u000e\u0000"+ + "\u0000\u01ea\u01eb\u0007\b\u0000\u0000\u01eb\u01ec\u0007\f\u0000\u0000"+ + "\u01ec\u01ed\u0007\t\u0000\u0000\u01ed\u01ee\u0007\u0000\u0000\u0000\u01ee"+ + "\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\t\u0005\u0000\u01f0\""+ + "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0007\u0006\u0000\u0000\u01f2\u01f3"+ + "\u0007\u0003\u0000\u0000\u01f3\u01f4\u0007\t\u0000\u0000\u01f4\u01f5\u0007"+ + "\f\u0000\u0000\u01f5\u01f6\u0007\u0010\u0000\u0000\u01f6\u01f7\u0007\u0003"+ + "\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\n\u0006"+ + "\u0000\u01f9$\u0001\u0000\u0000\u0000\u01fa\u01fb\u0007\u0006\u0000\u0000"+ + "\u01fb\u01fc\u0007\u0007\u0000\u0000\u01fc\u01fd\u0007\u0013\u0000\u0000"+ + "\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u01ff\u0006\u000b\u0000\u0000"+ + "\u01ff&\u0001\u0000\u0000\u0000\u0200\u0201\u0007\u0002\u0000\u0000\u0201"+ + "\u0202\u0007\n\u0000\u0000\u0202\u0203\u0007\u0007\u0000\u0000\u0203\u0204"+ + "\u0007\u0013\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ + "\u0006\f\u0007\u0000\u0206(\u0001\u0000\u0000\u0000\u0207\u0208\u0007"+ + "\u0002\u0000\u0000\u0208\u0209\u0007\u0007\u0000\u0000\u0209\u020a\u0007"+ + "\u0006\u0000\u0000\u020a\u020b\u0007\u0005\u0000\u0000\u020b\u020c\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0006\r\u0000\u0000\u020d*\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0007\u0005"+ + 
"\u0000\u0000\u0210\u0211\u0007\f\u0000\u0000\u0211\u0212\u0007\u0005\u0000"+ + "\u0000\u0212\u0213\u0007\u0002\u0000\u0000\u0213\u0214\u0001\u0000\u0000"+ + "\u0000\u0214\u0215\u0006\u000e\u0000\u0000\u0215,\u0001\u0000\u0000\u0000"+ + "\u0216\u0217\u0007\u0013\u0000\u0000\u0217\u0218\u0007\n\u0000\u0000\u0218"+ + "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0007\u0006\u0000\u0000\u021a"+ + "\u021b\u0007\u0003\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ + "\u021d\u0006\u000f\u0000\u0000\u021d.\u0001\u0000\u0000\u0000\u021e\u021f"+ + "\u0004\u0010\u0000\u0000\u021f\u0220\u0007\u0001\u0000\u0000\u0220\u0221"+ + "\u0007\t\u0000\u0000\u0221\u0222\u0007\r\u0000\u0000\u0222\u0223\u0007"+ + "\u0001\u0000\u0000\u0223\u0224\u0007\t\u0000\u0000\u0224\u0225\u0007\u0003"+ + "\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007\u0005"+ + "\u0000\u0000\u0227\u0228\u0007\f\u0000\u0000\u0228\u0229\u0007\u0005\u0000"+ + "\u0000\u0229\u022a\u0007\u0002\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ + "\u0000\u022b\u022c\u0006\u0010\u0000\u0000\u022c0\u0001\u0000\u0000\u0000"+ + "\u022d\u022e\u0004\u0011\u0001\u0000\u022e\u022f\u0007\r\u0000\u0000\u022f"+ + "\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0007\u0000\u0000\u0231"+ + "\u0232\u0007\u0012\u0000\u0000\u0232\u0233\u0007\u0014\u0000\u0000\u0233"+ + "\u0234\u0007\b\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236"+ + "\u0006\u0011\b\u0000\u02362\u0001\u0000\u0000\u0000\u0237\u0238\u0004"+ + "\u0012\u0002\u0000\u0238\u0239\u0007\u0010\u0000\u0000\u0239\u023a\u0007"+ + "\u0003\u0000\u0000\u023a\u023b\u0007\u0005\u0000\u0000\u023b\u023c\u0007"+ + "\u0006\u0000\u0000\u023c\u023d\u0007\u0001\u0000\u0000\u023d\u023e\u0007"+ + "\u0004\u0000\u0000\u023e\u023f\u0007\u0002\u0000\u0000\u023f\u0240\u0001"+ + "\u0000\u0000\u0000\u0240\u0241\u0006\u0012\t\u0000\u02414\u0001\u0000"+ + "\u0000\u0000\u0242\u0244\b\u0015\u0000\u0000\u0243\u0242\u0001\u0000\u0000"+ + 
"\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000"+ + "\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ + "\u0000\u0247\u0248\u0006\u0013\u0000\u0000\u02486\u0001\u0000\u0000\u0000"+ + "\u0249\u024a\u0005/\u0000\u0000\u024a\u024b\u0005/\u0000\u0000\u024b\u024f"+ + "\u0001\u0000\u0000\u0000\u024c\u024e\b\u0016\u0000\u0000\u024d\u024c\u0001"+ + "\u0000\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001"+ + "\u0000\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0253\u0001"+ + "\u0000\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0254\u0005"+ + "\r\u0000\u0000\u0253\u0252\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ + "\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0257\u0005\n\u0000"+ + "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000"+ + "\u0000\u0257\u0258\u0001\u0000\u0000\u0000\u0258\u0259\u0006\u0014\n\u0000"+ + "\u02598\u0001\u0000\u0000\u0000\u025a\u025b\u0005/\u0000\u0000\u025b\u025c"+ + "\u0005*\u0000\u0000\u025c\u0261\u0001\u0000\u0000\u0000\u025d\u0260\u0003"+ + "9\u0015\u0000\u025e\u0260\t\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ + "\u0000\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000"+ + "\u0000\u0000\u0261\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ + "\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ + "\u0000\u0000\u0264\u0265\u0005*\u0000\u0000\u0265\u0266\u0005/\u0000\u0000"+ + "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0015\n\u0000\u0268"+ + ":\u0001\u0000\u0000\u0000\u0269\u026b\u0007\u0017\u0000\u0000\u026a\u0269"+ + "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e"+ + "\u0001\u0000\u0000\u0000\u026e\u026f\u0006\u0016\n\u0000\u026f<\u0001"+ + "\u0000\u0000\u0000\u0270\u0271\u0005:\u0000\u0000\u0271>\u0001\u0000\u0000"+ + 
"\u0000\u0272\u0273\u0005|\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ + "\u0274\u0275\u0006\u0018\u000b\u0000\u0275@\u0001\u0000\u0000\u0000\u0276"+ + "\u0277\u0007\u0018\u0000\u0000\u0277B\u0001\u0000\u0000\u0000\u0278\u0279"+ + "\u0007\u0019\u0000\u0000\u0279D\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ + "\\\u0000\u0000\u027b\u027c\u0007\u001a\u0000\u0000\u027cF\u0001\u0000"+ + "\u0000\u0000\u027d\u027e\b\u001b\u0000\u0000\u027eH\u0001\u0000\u0000"+ + "\u0000\u027f\u0281\u0007\u0003\u0000\u0000\u0280\u0282\u0007\u001c\u0000"+ + "\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000\u0000"+ + "\u0000\u0282\u0284\u0001\u0000\u0000\u0000\u0283\u0285\u0003A\u0019\u0000"+ + "\u0284\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000"+ + "\u0286\u0284\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000\u0000\u0000"+ + "\u0287J\u0001\u0000\u0000\u0000\u0288\u0289\u0005@\u0000\u0000\u0289L"+ + "\u0001\u0000\u0000\u0000\u028a\u028b\u0005`\u0000\u0000\u028bN\u0001\u0000"+ + "\u0000\u0000\u028c\u0290\b\u001d\u0000\u0000\u028d\u028e\u0005`\u0000"+ + "\u0000\u028e\u0290\u0005`\u0000\u0000\u028f\u028c\u0001\u0000\u0000\u0000"+ + "\u028f\u028d\u0001\u0000\u0000\u0000\u0290P\u0001\u0000\u0000\u0000\u0291"+ + "\u0292\u0005_\u0000\u0000\u0292R\u0001\u0000\u0000\u0000\u0293\u0297\u0003"+ + "C\u001a\u0000\u0294\u0297\u0003A\u0019\u0000\u0295\u0297\u0003Q!\u0000"+ + "\u0296\u0293\u0001\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000"+ + "\u0296\u0295\u0001\u0000\u0000\u0000\u0297T\u0001\u0000\u0000\u0000\u0298"+ + "\u029d\u0005\"\u0000\u0000\u0299\u029c\u0003E\u001b\u0000\u029a\u029c"+ + "\u0003G\u001c\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b\u029a\u0001"+ + "\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001"+ + "\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a0\u0001"+ + "\u0000\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02b6\u0005"+ + 
"\"\u0000\u0000\u02a1\u02a2\u0005\"\u0000\u0000\u02a2\u02a3\u0005\"\u0000"+ + "\u0000\u02a3\u02a4\u0005\"\u0000\u0000\u02a4\u02a8\u0001\u0000\u0000\u0000"+ + "\u02a5\u02a7\b\u0016\u0000\u0000\u02a6\u02a5\u0001\u0000\u0000\u0000\u02a7"+ + "\u02aa\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000\u0000\u02a8"+ + "\u02a6\u0001\u0000\u0000\u0000\u02a9\u02ab\u0001\u0000\u0000\u0000\u02aa"+ + "\u02a8\u0001\u0000\u0000\u0000\u02ab\u02ac\u0005\"\u0000\u0000\u02ac\u02ad"+ + "\u0005\"\u0000\u0000\u02ad\u02ae\u0005\"\u0000\u0000\u02ae\u02b0\u0001"+ + "\u0000\u0000\u0000\u02af\u02b1\u0005\"\u0000\u0000\u02b0\u02af\u0001\u0000"+ + "\u0000\u0000\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b3\u0001\u0000"+ + "\u0000\u0000\u02b2\u02b4\u0005\"\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000"+ + "\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u02b6\u0001\u0000\u0000"+ + "\u0000\u02b5\u0298\u0001\u0000\u0000\u0000\u02b5\u02a1\u0001\u0000\u0000"+ + "\u0000\u02b6V\u0001\u0000\u0000\u0000\u02b7\u02b9\u0003A\u0019\u0000\u02b8"+ + "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ - "\u02b9\u0001\u0000\u0000\u0000\u02bb\u02bc\u0001\u0000\u0000\u0000\u02bc"+ - "\u02bd\u0001\u0000\u0000\u0000\u02bd\u02c1\u0003g,\u0000\u02be\u02c0\u0003"+ - "?\u0018\u0000\u02bf\u02be\u0001\u0000\u0000\u0000\u02c0\u02c3\u0001\u0000"+ - "\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001\u0000"+ - "\u0000\u0000\u02c2\u02e3\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000"+ - "\u0000\u0000\u02c4\u02c6\u0003g,\u0000\u02c5\u02c7\u0003?\u0018\u0000"+ - "\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8\u0001\u0000\u0000\u0000"+ - "\u02c8\u02c6\u0001\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000"+ - "\u02c9\u02e3\u0001\u0000\u0000\u0000\u02ca\u02cc\u0003?\u0018\u0000\u02cb"+ + "X\u0001\u0000\u0000\u0000\u02bc\u02be\u0003A\u0019\u0000\u02bd\u02bc\u0001"+ + 
"\u0000\u0000\u0000\u02be\u02bf\u0001\u0000\u0000\u0000\u02bf\u02bd\u0001"+ + "\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001"+ + "\u0000\u0000\u0000\u02c1\u02c5\u0003i-\u0000\u02c2\u02c4\u0003A\u0019"+ + "\u0000\u02c3\u02c2\u0001\u0000\u0000\u0000\u02c4\u02c7\u0001\u0000\u0000"+ + "\u0000\u02c5\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000"+ + "\u0000\u02c6\u02e7\u0001\u0000\u0000\u0000\u02c7\u02c5\u0001\u0000\u0000"+ + "\u0000\u02c8\u02ca\u0003i-\u0000\u02c9\u02cb\u0003A\u0019\u0000\u02ca"+ + "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ "\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd"+ - "\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce"+ - "\u02d6\u0001\u0000\u0000\u0000\u02cf\u02d3\u0003g,\u0000\u02d0\u02d2\u0003"+ - "?\u0018\u0000\u02d1\u02d0\u0001\u0000\u0000\u0000\u02d2\u02d5\u0001\u0000"+ - "\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000\u02d3\u02d4\u0001\u0000"+ - "\u0000\u0000\u02d4\u02d7\u0001\u0000\u0000\u0000\u02d5\u02d3\u0001\u0000"+ - "\u0000\u0000\u02d6\u02cf\u0001\u0000\u0000\u0000\u02d6\u02d7\u0001\u0000"+ - "\u0000\u0000\u02d7\u02d8\u0001\u0000\u0000\u0000\u02d8\u02d9\u0003G\u001c"+ - "\u0000\u02d9\u02e3\u0001\u0000\u0000\u0000\u02da\u02dc\u0003g,\u0000\u02db"+ - "\u02dd\u0003?\u0018\u0000\u02dc\u02db\u0001\u0000\u0000\u0000\u02dd\u02de"+ - "\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000\u0000\u02de\u02df"+ - "\u0001\u0000\u0000\u0000\u02df\u02e0\u0001\u0000\u0000\u0000\u02e0\u02e1"+ - "\u0003G\u001c\u0000\u02e1\u02e3\u0001\u0000\u0000\u0000\u02e2\u02b9\u0001"+ - "\u0000\u0000\u0000\u02e2\u02c4\u0001\u0000\u0000\u0000\u02e2\u02cb\u0001"+ - "\u0000\u0000\u0000\u02e2\u02da\u0001\u0000\u0000\u0000\u02e3X\u0001\u0000"+ - "\u0000\u0000\u02e4\u02e5\u0007\u001e\u0000\u0000\u02e5\u02e6\u0007\u001f"+ - "\u0000\u0000\u02e6Z\u0001\u0000\u0000\u0000\u02e7\u02e8\u0007\f\u0000"+ - 
"\u0000\u02e8\u02e9\u0007\t\u0000\u0000\u02e9\u02ea\u0007\u0000\u0000\u0000"+ - "\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000\u0000\u02ec"+ - "\u02ed\u0007\u0002\u0000\u0000\u02ed\u02ee\u0007\u0004\u0000\u0000\u02ee"+ - "^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005=\u0000\u0000\u02f0`\u0001"+ - "\u0000\u0000\u0000\u02f1\u02f2\u0005:\u0000\u0000\u02f2\u02f3\u0005:\u0000"+ - "\u0000\u02f3b\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005,\u0000\u0000\u02f5"+ - "d\u0001\u0000\u0000\u0000\u02f6\u02f7\u0007\u0000\u0000\u0000\u02f7\u02f8"+ - "\u0007\u0003\u0000\u0000\u02f8\u02f9\u0007\u0002\u0000\u0000\u02f9\u02fa"+ - "\u0007\u0004\u0000\u0000\u02faf\u0001\u0000\u0000\u0000\u02fb\u02fc\u0005"+ - ".\u0000\u0000\u02fch\u0001\u0000\u0000\u0000\u02fd\u02fe\u0007\u000f\u0000"+ - "\u0000\u02fe\u02ff\u0007\f\u0000\u0000\u02ff\u0300\u0007\r\u0000\u0000"+ - "\u0300\u0301\u0007\u0002\u0000\u0000\u0301\u0302\u0007\u0003\u0000\u0000"+ - "\u0302j\u0001\u0000\u0000\u0000\u0303\u0304\u0007\u000f\u0000\u0000\u0304"+ - "\u0305\u0007\u0001\u0000\u0000\u0305\u0306\u0007\u0006\u0000\u0000\u0306"+ - "\u0307\u0007\u0002\u0000\u0000\u0307\u0308\u0007\u0005\u0000\u0000\u0308"+ - "l\u0001\u0000\u0000\u0000\u0309\u030a\u0007\u0001\u0000\u0000\u030a\u030b"+ - "\u0007\t\u0000\u0000\u030bn\u0001\u0000\u0000\u0000\u030c\u030d\u0007"+ - "\u0001\u0000\u0000\u030d\u030e\u0007\u0002\u0000\u0000\u030ep\u0001\u0000"+ - "\u0000\u0000\u030f\u0310\u0007\r\u0000\u0000\u0310\u0311\u0007\f\u0000"+ - "\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312\u0313\u0007\u0005\u0000"+ - "\u0000\u0313r\u0001\u0000\u0000\u0000\u0314\u0315\u0007\r\u0000\u0000"+ - "\u0315\u0316\u0007\u0001\u0000\u0000\u0316\u0317\u0007\u0012\u0000\u0000"+ - "\u0317\u0318\u0007\u0003\u0000\u0000\u0318t\u0001\u0000\u0000\u0000\u0319"+ - "\u031a\u0005(\u0000\u0000\u031av\u0001\u0000\u0000\u0000\u031b\u031c\u0007"+ - "\t\u0000\u0000\u031c\u031d\u0007\u0007\u0000\u0000\u031d\u031e\u0007\u0005"+ - 
"\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007\t\u0000"+ - "\u0000\u0320\u0321\u0007\u0014\u0000\u0000\u0321\u0322\u0007\r\u0000\u0000"+ - "\u0322\u0323\u0007\r\u0000\u0000\u0323z\u0001\u0000\u0000\u0000\u0324"+ - "\u0325\u0007\t\u0000\u0000\u0325\u0326\u0007\u0014\u0000\u0000\u0326\u0327"+ - "\u0007\r\u0000\u0000\u0327\u0328\u0007\r\u0000\u0000\u0328\u0329\u0007"+ - "\u0002\u0000\u0000\u0329|\u0001\u0000\u0000\u0000\u032a\u032b\u0007\u0007"+ - "\u0000\u0000\u032b\u032c\u0007\u0006\u0000\u0000\u032c~\u0001\u0000\u0000"+ - "\u0000\u032d\u032e\u0005?\u0000\u0000\u032e\u0080\u0001\u0000\u0000\u0000"+ - "\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0331\u0007\r\u0000\u0000\u0331"+ - "\u0332\u0007\u0001\u0000\u0000\u0332\u0333\u0007\u0012\u0000\u0000\u0333"+ - "\u0334\u0007\u0003\u0000\u0000\u0334\u0082\u0001\u0000\u0000\u0000\u0335"+ - "\u0336\u0005)\u0000\u0000\u0336\u0084\u0001\u0000\u0000\u0000\u0337\u0338"+ - "\u0007\u0005\u0000\u0000\u0338\u0339\u0007\u0006\u0000\u0000\u0339\u033a"+ - "\u0007\u0014\u0000\u0000\u033a\u033b\u0007\u0003\u0000\u0000\u033b\u0086"+ - "\u0001\u0000\u0000\u0000\u033c\u033d\u0005=\u0000\u0000\u033d\u033e\u0005"+ - "=\u0000\u0000\u033e\u0088\u0001\u0000\u0000\u0000\u033f\u0340\u0005=\u0000"+ - "\u0000\u0340\u0341\u0005~\u0000\u0000\u0341\u008a\u0001\u0000\u0000\u0000"+ - "\u0342\u0343\u0005!\u0000\u0000\u0343\u0344\u0005=\u0000\u0000\u0344\u008c"+ - "\u0001\u0000\u0000\u0000\u0345\u0346\u0005<\u0000\u0000\u0346\u008e\u0001"+ - "\u0000\u0000\u0000\u0347\u0348\u0005<\u0000\u0000\u0348\u0349\u0005=\u0000"+ - "\u0000\u0349\u0090\u0001\u0000\u0000\u0000\u034a\u034b\u0005>\u0000\u0000"+ - "\u034b\u0092\u0001\u0000\u0000\u0000\u034c\u034d\u0005>\u0000\u0000\u034d"+ - "\u034e\u0005=\u0000\u0000\u034e\u0094\u0001\u0000\u0000\u0000\u034f\u0350"+ - "\u0005+\u0000\u0000\u0350\u0096\u0001\u0000\u0000\u0000\u0351\u0352\u0005"+ - "-\u0000\u0000\u0352\u0098\u0001\u0000\u0000\u0000\u0353\u0354\u0005*\u0000"+ - 
"\u0000\u0354\u009a\u0001\u0000\u0000\u0000\u0355\u0356\u0005/\u0000\u0000"+ - "\u0356\u009c\u0001\u0000\u0000\u0000\u0357\u0358\u0005%\u0000\u0000\u0358"+ - "\u009e\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0010\u0000\u0000\u035a"+ - "\u035b\u0007\f\u0000\u0000\u035b\u035c\u0007\u0005\u0000\u0000\u035c\u035d"+ - "\u0007\u0004\u0000\u0000\u035d\u035e\u0007\n\u0000\u0000\u035e\u00a0\u0001"+ - "\u0000\u0000\u0000\u035f\u0360\u0003-\u000f\u0000\u0360\u0361\u0001\u0000"+ - "\u0000\u0000\u0361\u0362\u0006I\f\u0000\u0362\u00a2\u0001\u0000\u0000"+ - "\u0000\u0363\u0366\u0003\u007f8\u0000\u0364\u0367\u0003A\u0019\u0000\u0365"+ - "\u0367\u0003O \u0000\u0366\u0364\u0001\u0000\u0000\u0000\u0366\u0365\u0001"+ - "\u0000\u0000\u0000\u0367\u036b\u0001\u0000\u0000\u0000\u0368\u036a\u0003"+ - "Q!\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036d\u0001\u0000\u0000"+ - "\u0000\u036b\u0369\u0001\u0000\u0000\u0000\u036b\u036c\u0001\u0000\u0000"+ - "\u0000\u036c\u0375\u0001\u0000\u0000\u0000\u036d\u036b\u0001\u0000\u0000"+ - "\u0000\u036e\u0370\u0003\u007f8\u0000\u036f\u0371\u0003?\u0018\u0000\u0370"+ - "\u036f\u0001\u0000\u0000\u0000\u0371\u0372\u0001\u0000\u0000\u0000\u0372"+ - "\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000\u0000\u0000\u0373"+ - "\u0375\u0001\u0000\u0000\u0000\u0374\u0363\u0001\u0000\u0000\u0000\u0374"+ - "\u036e\u0001\u0000\u0000\u0000\u0375\u00a4\u0001\u0000\u0000\u0000\u0376"+ - "\u0377\u0005[\u0000\u0000\u0377\u0378\u0001\u0000\u0000\u0000\u0378\u0379"+ - "\u0006K\u0000\u0000\u0379\u037a\u0006K\u0000\u0000\u037a\u00a6\u0001\u0000"+ - "\u0000\u0000\u037b\u037c\u0005]\u0000\u0000\u037c\u037d\u0001\u0000\u0000"+ - "\u0000\u037d\u037e\u0006L\u000b\u0000\u037e\u037f\u0006L\u000b\u0000\u037f"+ - "\u00a8\u0001\u0000\u0000\u0000\u0380\u0384\u0003A\u0019\u0000\u0381\u0383"+ - "\u0003Q!\u0000\u0382\u0381\u0001\u0000\u0000\u0000\u0383\u0386\u0001\u0000"+ - "\u0000\u0000\u0384\u0382\u0001\u0000\u0000\u0000\u0384\u0385\u0001\u0000"+ - 
"\u0000\u0000\u0385\u0391\u0001\u0000\u0000\u0000\u0386\u0384\u0001\u0000"+ - "\u0000\u0000\u0387\u038a\u0003O \u0000\u0388\u038a\u0003I\u001d\u0000"+ - "\u0389\u0387\u0001\u0000\u0000\u0000\u0389\u0388\u0001\u0000\u0000\u0000"+ - "\u038a\u038c\u0001\u0000\u0000\u0000\u038b\u038d\u0003Q!\u0000\u038c\u038b"+ - "\u0001\u0000\u0000\u0000\u038d\u038e\u0001\u0000\u0000\u0000\u038e\u038c"+ - "\u0001\u0000\u0000\u0000\u038e\u038f\u0001\u0000\u0000\u0000\u038f\u0391"+ - "\u0001\u0000\u0000\u0000\u0390\u0380\u0001\u0000\u0000\u0000\u0390\u0389"+ - "\u0001\u0000\u0000\u0000\u0391\u00aa\u0001\u0000\u0000\u0000\u0392\u0394"+ - "\u0003K\u001e\u0000\u0393\u0395\u0003M\u001f\u0000\u0394\u0393\u0001\u0000"+ - "\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0394\u0001\u0000"+ - "\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000\u0397\u0398\u0001\u0000"+ - "\u0000\u0000\u0398\u0399\u0003K\u001e\u0000\u0399\u00ac\u0001\u0000\u0000"+ - "\u0000\u039a\u039b\u0003\u00abN\u0000\u039b\u00ae\u0001\u0000\u0000\u0000"+ - "\u039c\u039d\u00037\u0014\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e"+ - "\u039f\u0006P\n\u0000\u039f\u00b0\u0001\u0000\u0000\u0000\u03a0\u03a1"+ - "\u00039\u0015\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0006"+ - "Q\n\u0000\u03a3\u00b2\u0001\u0000\u0000\u0000\u03a4\u03a5\u0003;\u0016"+ - "\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006R\n\u0000"+ - "\u03a7\u00b4\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003\u00a5K\u0000\u03a9"+ - "\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006S\r\u0000\u03ab\u03ac"+ - "\u0006S\u000e\u0000\u03ac\u00b6\u0001\u0000\u0000\u0000\u03ad\u03ae\u0003"+ - "=\u0017\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0\u0006T\u000f"+ - "\u0000\u03b0\u03b1\u0006T\u000b\u0000\u03b1\u00b8\u0001\u0000\u0000\u0000"+ - "\u03b2\u03b3\u0003;\u0016\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4"+ - "\u03b5\u0006U\n\u0000\u03b5\u00ba\u0001\u0000\u0000\u0000\u03b6\u03b7"+ - 
"\u00037\u0014\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006"+ - "V\n\u0000\u03b9\u00bc\u0001\u0000\u0000\u0000\u03ba\u03bb\u00039\u0015"+ - "\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006W\n\u0000"+ - "\u03bd\u00be\u0001\u0000\u0000\u0000\u03be\u03bf\u0003=\u0017\u0000\u03bf"+ - "\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006X\u000f\u0000\u03c1\u03c2"+ - "\u0006X\u000b\u0000\u03c2\u00c0\u0001\u0000\u0000\u0000\u03c3\u03c4\u0003"+ - "\u00a5K\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006Y\r"+ - "\u0000\u03c6\u00c2\u0001\u0000\u0000\u0000\u03c7\u03c8\u0003\u00a7L\u0000"+ - "\u03c8\u03c9\u0001\u0000\u0000\u0000\u03c9\u03ca\u0006Z\u0010\u0000\u03ca"+ - "\u00c4\u0001\u0000\u0000\u0000\u03cb\u03cc\u0003\u0151\u00a1\u0000\u03cc"+ - "\u03cd\u0001\u0000\u0000\u0000\u03cd\u03ce\u0006[\u0011\u0000\u03ce\u00c6"+ - "\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003c*\u0000\u03d0\u03d1\u0001\u0000"+ - "\u0000\u0000\u03d1\u03d2\u0006\\\u0012\u0000\u03d2\u00c8\u0001\u0000\u0000"+ - "\u0000\u03d3\u03d4\u0003_(\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000\u03d5"+ - "\u03d6\u0006]\u0013\u0000\u03d6\u00ca\u0001\u0000\u0000\u0000\u03d7\u03d8"+ - "\u0007\u0010\u0000\u0000\u03d8\u03d9\u0007\u0003\u0000\u0000\u03d9\u03da"+ - "\u0007\u0005\u0000\u0000\u03da\u03db\u0007\f\u0000\u0000\u03db\u03dc\u0007"+ - "\u0000\u0000\u0000\u03dc\u03dd\u0007\f\u0000\u0000\u03dd\u03de\u0007\u0005"+ - "\u0000\u0000\u03de\u03df\u0007\f\u0000\u0000\u03df\u00cc\u0001\u0000\u0000"+ - "\u0000\u03e0\u03e4\b \u0000\u0000\u03e1\u03e2\u0005/\u0000\u0000\u03e2"+ - "\u03e4\b!\u0000\u0000\u03e3\u03e0\u0001\u0000\u0000\u0000\u03e3\u03e1"+ - "\u0001\u0000\u0000\u0000\u03e4\u00ce\u0001\u0000\u0000\u0000\u03e5\u03e7"+ - "\u0003\u00cd_\u0000\u03e6\u03e5\u0001\u0000\u0000\u0000\u03e7\u03e8\u0001"+ - "\u0000\u0000\u0000\u03e8\u03e6\u0001\u0000\u0000\u0000\u03e8\u03e9\u0001"+ - "\u0000\u0000\u0000\u03e9\u00d0\u0001\u0000\u0000\u0000\u03ea\u03eb\u0003"+ - 
"\u00cf`\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006a\u0014"+ - "\u0000\u03ed\u00d2\u0001\u0000\u0000\u0000\u03ee\u03ef\u0003S\"\u0000"+ - "\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006b\u0015\u0000\u03f1"+ - "\u00d4\u0001\u0000\u0000\u0000\u03f2\u03f3\u00037\u0014\u0000\u03f3\u03f4"+ - "\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006c\n\u0000\u03f5\u00d6\u0001"+ - "\u0000\u0000\u0000\u03f6\u03f7\u00039\u0015\u0000\u03f7\u03f8\u0001\u0000"+ - "\u0000\u0000\u03f8\u03f9\u0006d\n\u0000\u03f9\u00d8\u0001\u0000\u0000"+ - "\u0000\u03fa\u03fb\u0003;\u0016\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000"+ - "\u03fc\u03fd\u0006e\n\u0000\u03fd\u00da\u0001\u0000\u0000\u0000\u03fe"+ - "\u03ff\u0003=\u0017\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401"+ - "\u0006f\u000f\u0000\u0401\u0402\u0006f\u000b\u0000\u0402\u00dc\u0001\u0000"+ - "\u0000\u0000\u0403\u0404\u0003g,\u0000\u0404\u0405\u0001\u0000\u0000\u0000"+ - "\u0405\u0406\u0006g\u0016\u0000\u0406\u00de\u0001\u0000\u0000\u0000\u0407"+ - "\u0408\u0003c*\u0000\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u040a\u0006"+ - "h\u0012\u0000\u040a\u00e0\u0001\u0000\u0000\u0000\u040b\u040c\u0004i\u0003"+ - "\u0000\u040c\u040d\u0003\u007f8\u0000\u040d\u040e\u0001\u0000\u0000\u0000"+ - "\u040e\u040f\u0006i\u0017\u0000\u040f\u00e2\u0001\u0000\u0000\u0000\u0410"+ - "\u0411\u0004j\u0004\u0000\u0411\u0412\u0003\u00a3J\u0000\u0412\u0413\u0001"+ - "\u0000\u0000\u0000\u0413\u0414\u0006j\u0018\u0000\u0414\u00e4\u0001\u0000"+ - "\u0000\u0000\u0415\u041a\u0003A\u0019\u0000\u0416\u041a\u0003?\u0018\u0000"+ - "\u0417\u041a\u0003O \u0000\u0418\u041a\u0003\u0099E\u0000\u0419\u0415"+ - "\u0001\u0000\u0000\u0000\u0419\u0416\u0001\u0000\u0000\u0000\u0419\u0417"+ - "\u0001\u0000\u0000\u0000\u0419\u0418\u0001\u0000\u0000\u0000\u041a\u00e6"+ - "\u0001\u0000\u0000\u0000\u041b\u041e\u0003A\u0019\u0000\u041c\u041e\u0003"+ - "\u0099E\u0000\u041d\u041b\u0001\u0000\u0000\u0000\u041d\u041c\u0001\u0000"+ - 
"\u0000\u0000\u041e\u0422\u0001\u0000\u0000\u0000\u041f\u0421\u0003\u00e5"+ - "k\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0424\u0001\u0000\u0000"+ - "\u0000\u0422\u0420\u0001\u0000\u0000\u0000\u0422\u0423\u0001\u0000\u0000"+ - "\u0000\u0423\u042f\u0001\u0000\u0000\u0000\u0424\u0422\u0001\u0000\u0000"+ - "\u0000\u0425\u0428\u0003O \u0000\u0426\u0428\u0003I\u001d\u0000\u0427"+ - "\u0425\u0001\u0000\u0000\u0000\u0427\u0426\u0001\u0000\u0000\u0000\u0428"+ - "\u042a\u0001\u0000\u0000\u0000\u0429\u042b\u0003\u00e5k\u0000\u042a\u0429"+ - "\u0001\u0000\u0000\u0000\u042b\u042c\u0001\u0000\u0000\u0000\u042c\u042a"+ - "\u0001\u0000\u0000\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042f"+ - "\u0001\u0000\u0000\u0000\u042e\u041d\u0001\u0000\u0000\u0000\u042e\u0427"+ - "\u0001\u0000\u0000\u0000\u042f\u00e8\u0001\u0000\u0000\u0000\u0430\u0433"+ - "\u0003\u00e7l\u0000\u0431\u0433\u0003\u00abN\u0000\u0432\u0430\u0001\u0000"+ - "\u0000\u0000\u0432\u0431\u0001\u0000\u0000\u0000\u0433\u0434\u0001\u0000"+ - "\u0000\u0000\u0434\u0432\u0001\u0000\u0000\u0000\u0434\u0435\u0001\u0000"+ - "\u0000\u0000\u0435\u00ea\u0001\u0000\u0000\u0000\u0436\u0437\u00037\u0014"+ - "\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439\u0006n\n\u0000"+ - "\u0439\u00ec\u0001\u0000\u0000\u0000\u043a\u043b\u00039\u0015\u0000\u043b"+ - "\u043c\u0001\u0000\u0000\u0000\u043c\u043d\u0006o\n\u0000\u043d\u00ee"+ - "\u0001\u0000\u0000\u0000\u043e\u043f\u0003;\u0016\u0000\u043f\u0440\u0001"+ - "\u0000\u0000\u0000\u0440\u0441\u0006p\n\u0000\u0441\u00f0\u0001\u0000"+ - "\u0000\u0000\u0442\u0443\u0003=\u0017\u0000\u0443\u0444\u0001\u0000\u0000"+ - "\u0000\u0444\u0445\u0006q\u000f\u0000\u0445\u0446\u0006q\u000b\u0000\u0446"+ - "\u00f2\u0001\u0000\u0000\u0000\u0447\u0448\u0003_(\u0000\u0448\u0449\u0001"+ - "\u0000\u0000\u0000\u0449\u044a\u0006r\u0013\u0000\u044a\u00f4\u0001\u0000"+ - "\u0000\u0000\u044b\u044c\u0003c*\u0000\u044c\u044d\u0001\u0000\u0000\u0000"+ - 
"\u044d\u044e\u0006s\u0012\u0000\u044e\u00f6\u0001\u0000\u0000\u0000\u044f"+ - "\u0450\u0003g,\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006"+ - "t\u0016\u0000\u0452\u00f8\u0001\u0000\u0000\u0000\u0453\u0454\u0004u\u0005"+ - "\u0000\u0454\u0455\u0003\u007f8\u0000\u0455\u0456\u0001\u0000\u0000\u0000"+ - "\u0456\u0457\u0006u\u0017\u0000\u0457\u00fa\u0001\u0000\u0000\u0000\u0458"+ - "\u0459\u0004v\u0006\u0000\u0459\u045a\u0003\u00a3J\u0000\u045a\u045b\u0001"+ - "\u0000\u0000\u0000\u045b\u045c\u0006v\u0018\u0000\u045c\u00fc\u0001\u0000"+ - "\u0000\u0000\u045d\u045e\u0007\f\u0000\u0000\u045e\u045f\u0007\u0002\u0000"+ - "\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460\u0461\u0003\u00e9m\u0000"+ - "\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463\u0006x\u0019\u0000\u0463"+ - "\u0100\u0001\u0000\u0000\u0000\u0464\u0465\u00037\u0014\u0000\u0465\u0466"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0006y\n\u0000\u0467\u0102\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u00039\u0015\u0000\u0469\u046a\u0001\u0000"+ - "\u0000\u0000\u046a\u046b\u0006z\n\u0000\u046b\u0104\u0001\u0000\u0000"+ - "\u0000\u046c\u046d\u0003;\u0016\u0000\u046d\u046e\u0001\u0000\u0000\u0000"+ - "\u046e\u046f\u0006{\n\u0000\u046f\u0106\u0001\u0000\u0000\u0000\u0470"+ - "\u0471\u0003=\u0017\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473"+ - "\u0006|\u000f\u0000\u0473\u0474\u0006|\u000b\u0000\u0474\u0108\u0001\u0000"+ - "\u0000\u0000\u0475\u0476\u0003\u00a5K\u0000\u0476\u0477\u0001\u0000\u0000"+ - "\u0000\u0477\u0478\u0006}\r\u0000\u0478\u0479\u0006}\u001a\u0000\u0479"+ - "\u010a\u0001\u0000\u0000\u0000\u047a\u047b\u0007\u0007\u0000\u0000\u047b"+ - "\u047c\u0007\t\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d\u047e"+ - "\u0006~\u001b\u0000\u047e\u010c\u0001\u0000\u0000\u0000\u047f\u0480\u0007"+ - "\u0013\u0000\u0000\u0480\u0481\u0007\u0001\u0000\u0000\u0481\u0482\u0007"+ - "\u0005\u0000\u0000\u0482\u0483\u0007\n\u0000\u0000\u0483\u0484\u0001\u0000"+ - 
"\u0000\u0000\u0484\u0485\u0006\u007f\u001b\u0000\u0485\u010e\u0001\u0000"+ - "\u0000\u0000\u0486\u0487\b\"\u0000\u0000\u0487\u0110\u0001\u0000\u0000"+ - "\u0000\u0488\u048a\u0003\u010f\u0080\u0000\u0489\u0488\u0001\u0000\u0000"+ - "\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u0489\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d\u0001\u0000\u0000"+ - "\u0000\u048d\u048e\u0003\u0151\u00a1\u0000\u048e\u0490\u0001\u0000\u0000"+ - "\u0000\u048f\u0489\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000"+ - "\u0000\u0490\u0492\u0001\u0000\u0000\u0000\u0491\u0493\u0003\u010f\u0080"+ - "\u0000\u0492\u0491\u0001\u0000\u0000\u0000\u0493\u0494\u0001\u0000\u0000"+ - "\u0000\u0494\u0492\u0001\u0000\u0000\u0000\u0494\u0495\u0001\u0000\u0000"+ - "\u0000\u0495\u0112\u0001\u0000\u0000\u0000\u0496\u0497\u0003\u0111\u0081"+ - "\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0499\u0006\u0082\u001c"+ - "\u0000\u0499\u0114\u0001\u0000\u0000\u0000\u049a\u049b\u00037\u0014\u0000"+ - "\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006\u0083\n\u0000\u049d"+ - "\u0116\u0001\u0000\u0000\u0000\u049e\u049f\u00039\u0015\u0000\u049f\u04a0"+ - "\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006\u0084\n\u0000\u04a1\u0118\u0001"+ - "\u0000\u0000\u0000\u04a2\u04a3\u0003;\u0016\u0000\u04a3\u04a4\u0001\u0000"+ - "\u0000\u0000\u04a4\u04a5\u0006\u0085\n\u0000\u04a5\u011a\u0001\u0000\u0000"+ - "\u0000\u04a6\u04a7\u0003=\u0017\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000"+ - "\u04a8\u04a9\u0006\u0086\u000f\u0000\u04a9\u04aa\u0006\u0086\u000b\u0000"+ - "\u04aa\u04ab\u0006\u0086\u000b\u0000\u04ab\u011c\u0001\u0000\u0000\u0000"+ - "\u04ac\u04ad\u0003_(\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae\u04af"+ - "\u0006\u0087\u0013\u0000\u04af\u011e\u0001\u0000\u0000\u0000\u04b0\u04b1"+ - "\u0003c*\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2\u04b3\u0006\u0088"+ - "\u0012\u0000\u04b3\u0120\u0001\u0000\u0000\u0000\u04b4\u04b5\u0003g,\u0000"+ - 
"\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6\u04b7\u0006\u0089\u0016\u0000"+ - "\u04b7\u0122\u0001\u0000\u0000\u0000\u04b8\u04b9\u0003\u010d\u007f\u0000"+ - "\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb\u0006\u008a\u001d\u0000"+ - "\u04bb\u0124\u0001\u0000\u0000\u0000\u04bc\u04bd\u0003\u00e9m\u0000\u04bd"+ - "\u04be\u0001\u0000\u0000\u0000\u04be\u04bf\u0006\u008b\u0019\u0000\u04bf"+ - "\u0126\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003\u00adO\u0000\u04c1\u04c2"+ - "\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u008c\u001e\u0000\u04c3\u0128"+ - "\u0001\u0000\u0000\u0000\u04c4\u04c5\u0004\u008d\u0007\u0000\u04c5\u04c6"+ - "\u0003\u007f8\u0000\u04c6\u04c7\u0001\u0000\u0000\u0000\u04c7\u04c8\u0006"+ - "\u008d\u0017\u0000\u04c8\u012a\u0001\u0000\u0000\u0000\u04c9\u04ca\u0004"+ - "\u008e\b\u0000\u04ca\u04cb\u0003\u00a3J\u0000\u04cb\u04cc\u0001\u0000"+ - "\u0000\u0000\u04cc\u04cd\u0006\u008e\u0018\u0000\u04cd\u012c\u0001\u0000"+ - "\u0000\u0000\u04ce\u04cf\u00037\u0014\u0000\u04cf\u04d0\u0001\u0000\u0000"+ - "\u0000\u04d0\u04d1\u0006\u008f\n\u0000\u04d1\u012e\u0001\u0000\u0000\u0000"+ - "\u04d2\u04d3\u00039\u0015\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ - "\u04d5\u0006\u0090\n\u0000\u04d5\u0130\u0001\u0000\u0000\u0000\u04d6\u04d7"+ - "\u0003;\u0016\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000\u04d8\u04d9\u0006"+ - "\u0091\n\u0000\u04d9\u0132\u0001\u0000\u0000\u0000\u04da\u04db\u0003="+ - "\u0017\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0006\u0092"+ - "\u000f\u0000\u04dd\u04de\u0006\u0092\u000b\u0000\u04de\u0134\u0001\u0000"+ - "\u0000\u0000\u04df\u04e0\u0003g,\u0000\u04e0\u04e1\u0001\u0000\u0000\u0000"+ - "\u04e1\u04e2\u0006\u0093\u0016\u0000\u04e2\u0136\u0001\u0000\u0000\u0000"+ - "\u04e3\u04e4\u0004\u0094\t\u0000\u04e4\u04e5\u0003\u007f8\u0000\u04e5"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0094\u0017\u0000\u04e7"+ - "\u0138\u0001\u0000\u0000\u0000\u04e8\u04e9\u0004\u0095\n\u0000\u04e9\u04ea"+ - 
"\u0003\u00a3J\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000\u04eb\u04ec\u0006"+ - "\u0095\u0018\u0000\u04ec\u013a\u0001\u0000\u0000\u0000\u04ed\u04ee\u0003"+ - "\u00adO\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef\u04f0\u0006\u0096"+ - "\u001e\u0000\u04f0\u013c\u0001\u0000\u0000\u0000\u04f1\u04f2\u0003\u00a9"+ - "M\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4\u0006\u0097\u001f"+ - "\u0000\u04f4\u013e\u0001\u0000\u0000\u0000\u04f5\u04f6\u00037\u0014\u0000"+ - "\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006\u0098\n\u0000\u04f8"+ - "\u0140\u0001\u0000\u0000\u0000\u04f9\u04fa\u00039\u0015\u0000\u04fa\u04fb"+ - "\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u0099\n\u0000\u04fc\u0142\u0001"+ - "\u0000\u0000\u0000\u04fd\u04fe\u0003;\u0016\u0000\u04fe\u04ff\u0001\u0000"+ - "\u0000\u0000\u04ff\u0500\u0006\u009a\n\u0000\u0500\u0144\u0001\u0000\u0000"+ - "\u0000\u0501\u0502\u0003=\u0017\u0000\u0502\u0503\u0001\u0000\u0000\u0000"+ - "\u0503\u0504\u0006\u009b\u000f\u0000\u0504\u0505\u0006\u009b\u000b\u0000"+ - "\u0505\u0146\u0001\u0000\u0000\u0000\u0506\u0507\u0007\u0001\u0000\u0000"+ - "\u0507\u0508\u0007\t\u0000\u0000\u0508\u0509\u0007\u000f\u0000\u0000\u0509"+ - "\u050a\u0007\u0007\u0000\u0000\u050a\u0148\u0001\u0000\u0000\u0000\u050b"+ - "\u050c\u00037\u0014\u0000\u050c\u050d\u0001\u0000\u0000\u0000\u050d\u050e"+ - "\u0006\u009d\n\u0000\u050e\u014a\u0001\u0000\u0000\u0000\u050f\u0510\u0003"+ - "9\u0015\u0000\u0510\u0511\u0001\u0000\u0000\u0000\u0511\u0512\u0006\u009e"+ - "\n\u0000\u0512\u014c\u0001\u0000\u0000\u0000\u0513\u0514\u0003;\u0016"+ - "\u0000\u0514\u0515\u0001\u0000\u0000\u0000\u0515\u0516\u0006\u009f\n\u0000"+ - "\u0516\u014e\u0001\u0000\u0000\u0000\u0517\u0518\u0003\u00a7L\u0000\u0518"+ - "\u0519\u0001\u0000\u0000\u0000\u0519\u051a\u0006\u00a0\u0010\u0000\u051a"+ - "\u051b\u0006\u00a0\u000b\u0000\u051b\u0150\u0001\u0000\u0000\u0000\u051c"+ - "\u051d\u0005:\u0000\u0000\u051d\u0152\u0001\u0000\u0000\u0000\u051e\u0524"+ - 
"\u0003I\u001d\u0000\u051f\u0524\u0003?\u0018\u0000\u0520\u0524\u0003g"+ - ",\u0000\u0521\u0524\u0003A\u0019\u0000\u0522\u0524\u0003O \u0000\u0523"+ - "\u051e\u0001\u0000\u0000\u0000\u0523\u051f\u0001\u0000\u0000\u0000\u0523"+ - "\u0520\u0001\u0000\u0000\u0000\u0523\u0521\u0001\u0000\u0000\u0000\u0523"+ - "\u0522\u0001\u0000\u0000\u0000\u0524\u0525\u0001\u0000\u0000\u0000\u0525"+ - "\u0523\u0001\u0000\u0000\u0000\u0525\u0526\u0001\u0000\u0000\u0000\u0526"+ - "\u0154\u0001\u0000\u0000\u0000\u0527\u0528\u00037\u0014\u0000\u0528\u0529"+ - "\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a3\n\u0000\u052a\u0156\u0001"+ - "\u0000\u0000\u0000\u052b\u052c\u00039\u0015\u0000\u052c\u052d\u0001\u0000"+ - "\u0000\u0000\u052d\u052e\u0006\u00a4\n\u0000\u052e\u0158\u0001\u0000\u0000"+ - "\u0000\u052f\u0530\u0003;\u0016\u0000\u0530\u0531\u0001\u0000\u0000\u0000"+ - "\u0531\u0532\u0006\u00a5\n\u0000\u0532\u015a\u0001\u0000\u0000\u0000\u0533"+ - "\u0534\u0003=\u0017\u0000\u0534\u0535\u0001\u0000\u0000\u0000\u0535\u0536"+ - "\u0006\u00a6\u000f\u0000\u0536\u0537\u0006\u00a6\u000b\u0000\u0537\u015c"+ - "\u0001\u0000\u0000\u0000\u0538\u0539\u0003\u0151\u00a1\u0000\u0539\u053a"+ - "\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7\u0011\u0000\u053b\u015e"+ - "\u0001\u0000\u0000\u0000\u053c\u053d\u0003c*\u0000\u053d\u053e\u0001\u0000"+ - "\u0000\u0000\u053e\u053f\u0006\u00a8\u0012\u0000\u053f\u0160\u0001\u0000"+ - "\u0000\u0000\u0540\u0541\u0003g,\u0000\u0541\u0542\u0001\u0000\u0000\u0000"+ - "\u0542\u0543\u0006\u00a9\u0016\u0000\u0543\u0162\u0001\u0000\u0000\u0000"+ - "\u0544\u0545\u0003\u010b~\u0000\u0545\u0546\u0001\u0000\u0000\u0000\u0546"+ - "\u0547\u0006\u00aa \u0000\u0547\u0548\u0006\u00aa!\u0000\u0548\u0164\u0001"+ - "\u0000\u0000\u0000\u0549\u054a\u0003\u00cf`\u0000\u054a\u054b\u0001\u0000"+ - "\u0000\u0000\u054b\u054c\u0006\u00ab\u0014\u0000\u054c\u0166\u0001\u0000"+ - "\u0000\u0000\u054d\u054e\u0003S\"\u0000\u054e\u054f\u0001\u0000\u0000"+ - 
"\u0000\u054f\u0550\u0006\u00ac\u0015\u0000\u0550\u0168\u0001\u0000\u0000"+ - "\u0000\u0551\u0552\u00037\u0014\u0000\u0552\u0553\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0006\u00ad\n\u0000\u0554\u016a\u0001\u0000\u0000\u0000\u0555"+ - "\u0556\u00039\u0015\u0000\u0556\u0557\u0001\u0000\u0000\u0000\u0557\u0558"+ - "\u0006\u00ae\n\u0000\u0558\u016c\u0001\u0000\u0000\u0000\u0559\u055a\u0003"+ - ";\u0016\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b\u055c\u0006\u00af"+ - "\n\u0000\u055c\u016e\u0001\u0000\u0000\u0000\u055d\u055e\u0003=\u0017"+ - "\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560\u0006\u00b0\u000f"+ - "\u0000\u0560\u0561\u0006\u00b0\u000b\u0000\u0561\u0562\u0006\u00b0\u000b"+ - "\u0000\u0562\u0170\u0001\u0000\u0000\u0000\u0563\u0564\u0003c*\u0000\u0564"+ - "\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b1\u0012\u0000\u0566"+ - "\u0172\u0001\u0000\u0000\u0000\u0567\u0568\u0003g,\u0000\u0568\u0569\u0001"+ - "\u0000\u0000\u0000\u0569\u056a\u0006\u00b2\u0016\u0000\u056a\u0174\u0001"+ - "\u0000\u0000\u0000\u056b\u056c\u0003\u00e9m\u0000\u056c\u056d\u0001\u0000"+ - "\u0000\u0000\u056d\u056e\u0006\u00b3\u0019\u0000\u056e\u0176\u0001\u0000"+ - "\u0000\u0000\u056f\u0570\u00037\u0014\u0000\u0570\u0571\u0001\u0000\u0000"+ - "\u0000\u0571\u0572\u0006\u00b4\n\u0000\u0572\u0178\u0001\u0000\u0000\u0000"+ - "\u0573\u0574\u00039\u0015\u0000\u0574\u0575\u0001\u0000\u0000\u0000\u0575"+ - "\u0576\u0006\u00b5\n\u0000\u0576\u017a\u0001\u0000\u0000\u0000\u0577\u0578"+ - "\u0003;\u0016\u0000\u0578\u0579\u0001\u0000\u0000\u0000\u0579\u057a\u0006"+ - "\u00b6\n\u0000\u057a\u017c\u0001\u0000\u0000\u0000\u057b\u057c\u0003="+ - "\u0017\u0000\u057c\u057d\u0001\u0000\u0000\u0000\u057d\u057e\u0006\u00b7"+ - "\u000f\u0000\u057e\u057f\u0006\u00b7\u000b\u0000\u057f\u017e\u0001\u0000"+ - "\u0000\u0000\u0580\u0581\u0003\u00cf`\u0000\u0581\u0582\u0001\u0000\u0000"+ - "\u0000\u0582\u0583\u0006\u00b8\u0014\u0000\u0583\u0584\u0006\u00b8\u000b"+ - 
"\u0000\u0584\u0585\u0006\u00b8\"\u0000\u0585\u0180\u0001\u0000\u0000\u0000"+ - "\u0586\u0587\u0003S\"\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ - "\u0589\u0006\u00b9\u0015\u0000\u0589\u058a\u0006\u00b9\u000b\u0000\u058a"+ - "\u058b\u0006\u00b9\"\u0000\u058b\u0182\u0001\u0000\u0000\u0000\u058c\u058d"+ - "\u00037\u0014\u0000\u058d\u058e\u0001\u0000\u0000\u0000\u058e\u058f\u0006"+ - "\u00ba\n\u0000\u058f\u0184\u0001\u0000\u0000\u0000\u0590\u0591\u00039"+ - "\u0015\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592\u0593\u0006\u00bb"+ - "\n\u0000\u0593\u0186\u0001\u0000\u0000\u0000\u0594\u0595\u0003;\u0016"+ - "\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596\u0597\u0006\u00bc\n\u0000"+ - "\u0597\u0188\u0001\u0000\u0000\u0000\u0598\u0599\u0003\u0151\u00a1\u0000"+ - "\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006\u00bd\u0011\u0000"+ - "\u059b\u059c\u0006\u00bd\u000b\u0000\u059c\u059d\u0006\u00bd\t\u0000\u059d"+ - "\u018a\u0001\u0000\u0000\u0000\u059e\u059f\u0003c*\u0000\u059f\u05a0\u0001"+ - "\u0000\u0000\u0000\u05a0\u05a1\u0006\u00be\u0012\u0000\u05a1\u05a2\u0006"+ - "\u00be\u000b\u0000\u05a2\u05a3\u0006\u00be\t\u0000\u05a3\u018c\u0001\u0000"+ - "\u0000\u0000\u05a4\u05a5\u00037\u0014\u0000\u05a5\u05a6\u0001\u0000\u0000"+ - "\u0000\u05a6\u05a7\u0006\u00bf\n\u0000\u05a7\u018e\u0001\u0000\u0000\u0000"+ - "\u05a8\u05a9\u00039\u0015\u0000\u05a9\u05aa\u0001\u0000\u0000\u0000\u05aa"+ - "\u05ab\u0006\u00c0\n\u0000\u05ab\u0190\u0001\u0000\u0000\u0000\u05ac\u05ad"+ - "\u0003;\u0016\u0000\u05ad\u05ae\u0001\u0000\u0000\u0000\u05ae\u05af\u0006"+ - "\u00c1\n\u0000\u05af\u0192\u0001\u0000\u0000\u0000\u05b0\u05b1\u0003\u00ad"+ - "O\u0000\u05b1\u05b2\u0001\u0000\u0000\u0000\u05b2\u05b3\u0006\u00c2\u000b"+ - "\u0000\u05b3\u05b4\u0006\u00c2\u0000\u0000\u05b4\u05b5\u0006\u00c2\u001e"+ - "\u0000\u05b5\u0194\u0001\u0000\u0000\u0000\u05b6\u05b7\u0003\u00a9M\u0000"+ - "\u05b7\u05b8\u0001\u0000\u0000\u0000\u05b8\u05b9\u0006\u00c3\u000b\u0000"+ - 
"\u05b9\u05ba\u0006\u00c3\u0000\u0000\u05ba\u05bb\u0006\u00c3\u001f\u0000"+ - "\u05bb\u0196\u0001\u0000\u0000\u0000\u05bc\u05bd\u0003Y%\u0000\u05bd\u05be"+ - "\u0001\u0000\u0000\u0000\u05be\u05bf\u0006\u00c4\u000b\u0000\u05bf\u05c0"+ - "\u0006\u00c4\u0000\u0000\u05c0\u05c1\u0006\u00c4#\u0000\u05c1\u0198\u0001"+ - "\u0000\u0000\u0000\u05c2\u05c3\u0003=\u0017\u0000\u05c3\u05c4\u0001\u0000"+ - "\u0000\u0000\u05c4\u05c5\u0006\u00c5\u000f\u0000\u05c5\u05c6\u0006\u00c5"+ - "\u000b\u0000\u05c6\u019a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u0243\u024d\u0251\u0254"+ - "\u025d\u025f\u026a\u027d\u0282\u028b\u0292\u0297\u0299\u02a4\u02ac\u02af"+ - "\u02b1\u02b6\u02bb\u02c1\u02c8\u02cd\u02d3\u02d6\u02de\u02e2\u0366\u036b"+ - "\u0372\u0374\u0384\u0389\u038e\u0390\u0396\u03e3\u03e8\u0419\u041d\u0422"+ - "\u0427\u042c\u042e\u0432\u0434\u048b\u048f\u0494\u0523\u0525$\u0005\u0001"+ - "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ - "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ - "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007"+ - "A\u0000\u0005\u0000\u0000\u0007\u0018\u0000\u0007B\u0000\u0007h\u0000"+ - "\u0007!\u0000\u0007\u001f\u0000\u0007L\u0000\u0007\u0019\u0000\u0007#"+ - "\u0000\u0007/\u0000\u0007@\u0000\u0007P\u0000\u0005\n\u0000\u0005\u0007"+ - "\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007C\u0000\u0007X\u0000"+ - "\u0005\f\u0000\u0005\u000e\u0000\u0007\u001c\u0000"; + "\u02e7\u0001\u0000\u0000\u0000\u02ce\u02d0\u0003A\u0019\u0000\u02cf\u02ce"+ + "\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000\u02d1\u02cf"+ + "\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02da"+ + "\u0001\u0000\u0000\u0000\u02d3\u02d7\u0003i-\u0000\u02d4\u02d6\u0003A"+ + "\u0019\u0000\u02d5\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d9\u0001\u0000"+ + "\u0000\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ + 
"\u0000\u0000\u02d8\u02db\u0001\u0000\u0000\u0000\u02d9\u02d7\u0001\u0000"+ + "\u0000\u0000\u02da\u02d3\u0001\u0000\u0000\u0000\u02da\u02db\u0001\u0000"+ + "\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000\u02dc\u02dd\u0003I\u001d"+ + "\u0000\u02dd\u02e7\u0001\u0000\u0000\u0000\u02de\u02e0\u0003i-\u0000\u02df"+ + "\u02e1\u0003A\u0019\u0000\u02e0\u02df\u0001\u0000\u0000\u0000\u02e1\u02e2"+ + "\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e3"+ + "\u0001\u0000\u0000\u0000\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e5"+ + "\u0003I\u001d\u0000\u02e5\u02e7\u0001\u0000\u0000\u0000\u02e6\u02bd\u0001"+ + "\u0000\u0000\u0000\u02e6\u02c8\u0001\u0000\u0000\u0000\u02e6\u02cf\u0001"+ + "\u0000\u0000\u0000\u02e6\u02de\u0001\u0000\u0000\u0000\u02e7Z\u0001\u0000"+ + "\u0000\u0000\u02e8\u02e9\u0007\u001e\u0000\u0000\u02e9\u02ea\u0007\u001f"+ + "\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000"+ + "\u0000\u02ec\u02ed\u0007\t\u0000\u0000\u02ed\u02ee\u0007\u0000\u0000\u0000"+ + "\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0007\f\u0000\u0000\u02f0"+ + "\u02f1\u0007\u0002\u0000\u0000\u02f1\u02f2\u0007\u0004\u0000\u0000\u02f2"+ + "`\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4b\u0001"+ + "\u0000\u0000\u0000\u02f5\u02f6\u0005:\u0000\u0000\u02f6\u02f7\u0005:\u0000"+ + "\u0000\u02f7d\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005,\u0000\u0000\u02f9"+ + "f\u0001\u0000\u0000\u0000\u02fa\u02fb\u0007\u0000\u0000\u0000\u02fb\u02fc"+ + "\u0007\u0003\u0000\u0000\u02fc\u02fd\u0007\u0002\u0000\u0000\u02fd\u02fe"+ + "\u0007\u0004\u0000\u0000\u02feh\u0001\u0000\u0000\u0000\u02ff\u0300\u0005"+ + ".\u0000\u0000\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000"+ + "\u0000\u0302\u0303\u0007\f\u0000\u0000\u0303\u0304\u0007\r\u0000\u0000"+ + "\u0304\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0003\u0000\u0000"+ + "\u0306l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u000f\u0000\u0000\u0308"+ + 
"\u0309\u0007\u0001\u0000\u0000\u0309\u030a\u0007\u0006\u0000\u0000\u030a"+ + "\u030b\u0007\u0002\u0000\u0000\u030b\u030c\u0007\u0005\u0000\u0000\u030c"+ + "n\u0001\u0000\u0000\u0000\u030d\u030e\u0007\u0001\u0000\u0000\u030e\u030f"+ + "\u0007\t\u0000\u0000\u030fp\u0001\u0000\u0000\u0000\u0310\u0311\u0007"+ + "\u0001\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312r\u0001\u0000"+ + "\u0000\u0000\u0313\u0314\u0007\r\u0000\u0000\u0314\u0315\u0007\f\u0000"+ + "\u0000\u0315\u0316\u0007\u0002\u0000\u0000\u0316\u0317\u0007\u0005\u0000"+ + "\u0000\u0317t\u0001\u0000\u0000\u0000\u0318\u0319\u0007\r\u0000\u0000"+ + "\u0319\u031a\u0007\u0001\u0000\u0000\u031a\u031b\u0007\u0012\u0000\u0000"+ + "\u031b\u031c\u0007\u0003\u0000\u0000\u031cv\u0001\u0000\u0000\u0000\u031d"+ + "\u031e\u0005(\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007"+ + "\t\u0000\u0000\u0320\u0321\u0007\u0007\u0000\u0000\u0321\u0322\u0007\u0005"+ + "\u0000\u0000\u0322z\u0001\u0000\u0000\u0000\u0323\u0324\u0007\t\u0000"+ + "\u0000\u0324\u0325\u0007\u0014\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000"+ + "\u0326\u0327\u0007\r\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328"+ + "\u0329\u0007\t\u0000\u0000\u0329\u032a\u0007\u0014\u0000\u0000\u032a\u032b"+ + "\u0007\r\u0000\u0000\u032b\u032c\u0007\r\u0000\u0000\u032c\u032d\u0007"+ + "\u0002\u0000\u0000\u032d~\u0001\u0000\u0000\u0000\u032e\u032f\u0007\u0007"+ + "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0080\u0001\u0000"+ + "\u0000\u0000\u0331\u0332\u0005?\u0000\u0000\u0332\u0082\u0001\u0000\u0000"+ + "\u0000\u0333\u0334\u0007\u0006\u0000\u0000\u0334\u0335\u0007\r\u0000\u0000"+ + "\u0335\u0336\u0007\u0001\u0000\u0000\u0336\u0337\u0007\u0012\u0000\u0000"+ + "\u0337\u0338\u0007\u0003\u0000\u0000\u0338\u0084\u0001\u0000\u0000\u0000"+ + "\u0339\u033a\u0005)\u0000\u0000\u033a\u0086\u0001\u0000\u0000\u0000\u033b"+ + "\u033c\u0007\u0005\u0000\u0000\u033c\u033d\u0007\u0006\u0000\u0000\u033d"+ + 
"\u033e\u0007\u0014\u0000\u0000\u033e\u033f\u0007\u0003\u0000\u0000\u033f"+ + "\u0088\u0001\u0000\u0000\u0000\u0340\u0341\u0005=\u0000\u0000\u0341\u0342"+ + "\u0005=\u0000\u0000\u0342\u008a\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ + "=\u0000\u0000\u0344\u0345\u0005~\u0000\u0000\u0345\u008c\u0001\u0000\u0000"+ + "\u0000\u0346\u0347\u0005!\u0000\u0000\u0347\u0348\u0005=\u0000\u0000\u0348"+ + "\u008e\u0001\u0000\u0000\u0000\u0349\u034a\u0005<\u0000\u0000\u034a\u0090"+ + "\u0001\u0000\u0000\u0000\u034b\u034c\u0005<\u0000\u0000\u034c\u034d\u0005"+ + "=\u0000\u0000\u034d\u0092\u0001\u0000\u0000\u0000\u034e\u034f\u0005>\u0000"+ + "\u0000\u034f\u0094\u0001\u0000\u0000\u0000\u0350\u0351\u0005>\u0000\u0000"+ + "\u0351\u0352\u0005=\u0000\u0000\u0352\u0096\u0001\u0000\u0000\u0000\u0353"+ + "\u0354\u0005+\u0000\u0000\u0354\u0098\u0001\u0000\u0000\u0000\u0355\u0356"+ + "\u0005-\u0000\u0000\u0356\u009a\u0001\u0000\u0000\u0000\u0357\u0358\u0005"+ + "*\u0000\u0000\u0358\u009c\u0001\u0000\u0000\u0000\u0359\u035a\u0005/\u0000"+ + "\u0000\u035a\u009e\u0001\u0000\u0000\u0000\u035b\u035c\u0005%\u0000\u0000"+ + "\u035c\u00a0\u0001\u0000\u0000\u0000\u035d\u035e\u0004I\u0003\u0000\u035e"+ + "\u035f\u0003=\u0017\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u0361"+ + "\u0006I\f\u0000\u0361\u00a2\u0001\u0000\u0000\u0000\u0362\u0363\u0003"+ + "-\u000f\u0000\u0363\u0364\u0001\u0000\u0000\u0000\u0364\u0365\u0006J\r"+ + "\u0000\u0365\u00a4\u0001\u0000\u0000\u0000\u0366\u0369\u0003\u00819\u0000"+ + "\u0367\u036a\u0003C\u001a\u0000\u0368\u036a\u0003Q!\u0000\u0369\u0367"+ + "\u0001\u0000\u0000\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036e"+ + "\u0001\u0000\u0000\u0000\u036b\u036d\u0003S\"\u0000\u036c\u036b\u0001"+ + "\u0000\u0000\u0000\u036d\u0370\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ + "\u0000\u0000\u0000\u036e\u036f\u0001\u0000\u0000\u0000\u036f\u0378\u0001"+ + "\u0000\u0000\u0000\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u0373\u0003"+ + 
"\u00819\u0000\u0372\u0374\u0003A\u0019\u0000\u0373\u0372\u0001\u0000\u0000"+ + "\u0000\u0374\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000"+ + "\u0000\u0375\u0376\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000"+ + "\u0000\u0377\u0366\u0001\u0000\u0000\u0000\u0377\u0371\u0001\u0000\u0000"+ + "\u0000\u0378\u00a6\u0001\u0000\u0000\u0000\u0379\u037a\u0005[\u0000\u0000"+ + "\u037a\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006L\u0000\u0000\u037c"+ + "\u037d\u0006L\u0000\u0000\u037d\u00a8\u0001\u0000\u0000\u0000\u037e\u037f"+ + "\u0005]\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380\u0381\u0006"+ + "M\u000b\u0000\u0381\u0382\u0006M\u000b\u0000\u0382\u00aa\u0001\u0000\u0000"+ + "\u0000\u0383\u0387\u0003C\u001a\u0000\u0384\u0386\u0003S\"\u0000\u0385"+ + "\u0384\u0001\u0000\u0000\u0000\u0386\u0389\u0001\u0000\u0000\u0000\u0387"+ + "\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388"+ + "\u0394\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000\u0000\u038a"+ + "\u038d\u0003Q!\u0000\u038b\u038d\u0003K\u001e\u0000\u038c\u038a\u0001"+ + "\u0000\u0000\u0000\u038c\u038b\u0001\u0000\u0000\u0000\u038d\u038f\u0001"+ + "\u0000\u0000\u0000\u038e\u0390\u0003S\"\u0000\u038f\u038e\u0001\u0000"+ + "\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000"+ + "\u0000\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392\u0394\u0001\u0000"+ + "\u0000\u0000\u0393\u0383\u0001\u0000\u0000\u0000\u0393\u038c\u0001\u0000"+ + "\u0000\u0000\u0394\u00ac\u0001\u0000\u0000\u0000\u0395\u0397\u0003M\u001f"+ + "\u0000\u0396\u0398\u0003O \u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398"+ + "\u0399\u0001\u0000\u0000\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399"+ + "\u039a\u0001\u0000\u0000\u0000\u039a\u039b\u0001\u0000\u0000\u0000\u039b"+ + "\u039c\u0003M\u001f\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e"+ + "\u0003\u00adO\u0000\u039e\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ + 
"7\u0014\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n"+ + "\u0000\u03a2\u00b2\u0001\u0000\u0000\u0000\u03a3\u03a4\u00039\u0015\u0000"+ + "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006R\n\u0000\u03a6"+ + "\u00b4\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003;\u0016\u0000\u03a8\u03a9"+ + "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006S\n\u0000\u03aa\u00b6\u0001"+ + "\u0000\u0000\u0000\u03ab\u03ac\u0003\u00a7L\u0000\u03ac\u03ad\u0001\u0000"+ + "\u0000\u0000\u03ad\u03ae\u0006T\u000e\u0000\u03ae\u03af\u0006T\u000f\u0000"+ + "\u03af\u00b8\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003?\u0018\u0000\u03b1"+ + "\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006U\u0010\u0000\u03b3\u03b4"+ + "\u0006U\u000b\u0000\u03b4\u00ba\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003"+ + ";\u0016\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006V\n"+ + "\u0000\u03b8\u00bc\u0001\u0000\u0000\u0000\u03b9\u03ba\u00037\u0014\u0000"+ + "\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc\u0006W\n\u0000\u03bc"+ + "\u00be\u0001\u0000\u0000\u0000\u03bd\u03be\u00039\u0015\u0000\u03be\u03bf"+ + "\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006X\n\u0000\u03c0\u00c0\u0001"+ + "\u0000\u0000\u0000\u03c1\u03c2\u0003?\u0018\u0000\u03c2\u03c3\u0001\u0000"+ + "\u0000\u0000\u03c3\u03c4\u0006Y\u0010\u0000\u03c4\u03c5\u0006Y\u000b\u0000"+ + "\u03c5\u00c2\u0001\u0000\u0000\u0000\u03c6\u03c7\u0003\u00a7L\u0000\u03c7"+ + "\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9\u0006Z\u000e\u0000\u03c9\u00c4"+ + "\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003\u00a9M\u0000\u03cb\u03cc\u0001"+ + "\u0000\u0000\u0000\u03cc\u03cd\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000"+ + "\u0000\u0000\u03ce\u03cf\u0003=\u0017\u0000\u03cf\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d0\u03d1\u0006\\\f\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000"+ + "\u03d2\u03d3\u0003e+\u0000\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5"+ + "\u0006]\u0012\u0000\u03d5\u00ca\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003"+ + 
"a)\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006^\u0013"+ + "\u0000\u03d9\u00cc\u0001\u0000\u0000\u0000\u03da\u03db\u0007\u0010\u0000"+ + "\u0000\u03db\u03dc\u0007\u0003\u0000\u0000\u03dc\u03dd\u0007\u0005\u0000"+ + "\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0000\u0000\u0000"+ + "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u03e1\u0007\u0005\u0000\u0000\u03e1"+ + "\u03e2\u0007\f\u0000\u0000\u03e2\u00ce\u0001\u0000\u0000\u0000\u03e3\u03e7"+ + "\b \u0000\u0000\u03e4\u03e5\u0005/\u0000\u0000\u03e5\u03e7\b!\u0000\u0000"+ + "\u03e6\u03e3\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001\u0000\u0000\u0000"+ + "\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03ea\u0003\u00cf`\u0000\u03e9"+ + "\u03e8\u0001\u0000\u0000\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb"+ + "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ + "\u00d2\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00d1a\u0000\u03ee\u03ef"+ + "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0006b\u0014\u0000\u03f0\u00d4\u0001"+ + "\u0000\u0000\u0000\u03f1\u03f2\u0003U#\u0000\u03f2\u03f3\u0001\u0000\u0000"+ + "\u0000\u03f3\u03f4\u0006c\u0015\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000"+ + "\u03f5\u03f6\u00037\u0014\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ + "\u03f8\u0006d\n\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa"+ + "\u00039\u0015\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ + "e\n\u0000\u03fc\u00da\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003;\u0016"+ + "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006f\n\u0000"+ + "\u0400\u00dc\u0001\u0000\u0000\u0000\u0401\u0402\u0003?\u0018\u0000\u0402"+ + "\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006g\u0010\u0000\u0404\u0405"+ + "\u0006g\u000b\u0000\u0405\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ + "i-\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016"+ + "\u0000\u0409\u00e0\u0001\u0000\u0000\u0000\u040a\u040b\u0003e+\u0000\u040b"+ + 
"\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006i\u0012\u0000\u040d\u00e2"+ + "\u0001\u0000\u0000\u0000\u040e\u040f\u0004j\u0004\u0000\u040f\u0410\u0003"+ + "\u00819\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0017"+ + "\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0414\u0004k\u0005\u0000"+ + "\u0414\u0415\u0003\u00a5K\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416"+ + "\u0417\u0006k\u0018\u0000\u0417\u00e6\u0001\u0000\u0000\u0000\u0418\u041d"+ + "\u0003C\u001a\u0000\u0419\u041d\u0003A\u0019\u0000\u041a\u041d\u0003Q"+ + "!\u0000\u041b\u041d\u0003\u009bF\u0000\u041c\u0418\u0001\u0000\u0000\u0000"+ + "\u041c\u0419\u0001\u0000\u0000\u0000\u041c\u041a\u0001\u0000\u0000\u0000"+ + "\u041c\u041b\u0001\u0000\u0000\u0000\u041d\u00e8\u0001\u0000\u0000\u0000"+ + "\u041e\u0421\u0003C\u001a\u0000\u041f\u0421\u0003\u009bF\u0000\u0420\u041e"+ + "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0425"+ + "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e7l\u0000\u0423\u0422\u0001"+ + "\u0000\u0000\u0000\u0424\u0427\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ + "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0432\u0001"+ + "\u0000\u0000\u0000\u0427\u0425\u0001\u0000\u0000\u0000\u0428\u042b\u0003"+ + "Q!\u0000\u0429\u042b\u0003K\u001e\u0000\u042a\u0428\u0001\u0000\u0000"+ + "\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b\u042d\u0001\u0000\u0000"+ + "\u0000\u042c\u042e\u0003\u00e7l\u0000\u042d\u042c\u0001\u0000\u0000\u0000"+ + "\u042e\u042f\u0001\u0000\u0000\u0000\u042f\u042d\u0001\u0000\u0000\u0000"+ + "\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0432\u0001\u0000\u0000\u0000"+ + "\u0431\u0420\u0001\u0000\u0000\u0000\u0431\u042a\u0001\u0000\u0000\u0000"+ + "\u0432\u00ea\u0001\u0000\u0000\u0000\u0433\u0436\u0003\u00e9m\u0000\u0434"+ + "\u0436\u0003\u00adO\u0000\u0435\u0433\u0001\u0000\u0000\u0000\u0435\u0434"+ + "\u0001\u0000\u0000\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0435"+ + 
"\u0001\u0000\u0000\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u00ec"+ + "\u0001\u0000\u0000\u0000\u0439\u043a\u00037\u0014\u0000\u043a\u043b\u0001"+ + "\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee\u0001\u0000"+ + "\u0000\u0000\u043d\u043e\u00039\u0015\u0000\u043e\u043f\u0001\u0000\u0000"+ + "\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000\u0000\u0000"+ + "\u0441\u0442\u0003;\u0016\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ + "\u0444\u0006q\n\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446"+ + "\u0003?\u0018\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ + "r\u0010\u0000\u0448\u0449\u0006r\u000b\u0000\u0449\u00f4\u0001\u0000\u0000"+ + "\u0000\u044a\u044b\u0003a)\u0000\u044b\u044c\u0001\u0000\u0000\u0000\u044c"+ + "\u044d\u0006s\u0013\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e\u044f"+ + "\u0003e+\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006t"+ + "\u0012\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003i-\u0000"+ + "\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0016\u0000\u0455"+ + "\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0004v\u0006\u0000\u0457\u0458"+ + "\u0003\u00819\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006"+ + "v\u0017\u0000\u045a\u00fc\u0001\u0000\u0000\u0000\u045b\u045c\u0004w\u0007"+ + "\u0000\u045c\u045d\u0003\u00a5K\u0000\u045d\u045e\u0001\u0000\u0000\u0000"+ + "\u045e\u045f\u0006w\u0018\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460"+ + "\u0461\u0007\f\u0000\u0000\u0461\u0462\u0007\u0002\u0000\u0000\u0462\u0100"+ + "\u0001\u0000\u0000\u0000\u0463\u0464\u0003\u00ebn\u0000\u0464\u0465\u0001"+ + "\u0000\u0000\u0000\u0465\u0466\u0006y\u0019\u0000\u0466\u0102\u0001\u0000"+ + "\u0000\u0000\u0467\u0468\u00037\u0014\u0000\u0468\u0469\u0001\u0000\u0000"+ + "\u0000\u0469\u046a\u0006z\n\u0000\u046a\u0104\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u00039\u0015\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d"+ + 
"\u046e\u0006{\n\u0000\u046e\u0106\u0001\u0000\u0000\u0000\u046f\u0470"+ + "\u0003;\u0016\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006"+ + "|\n\u0000\u0472\u0108\u0001\u0000\u0000\u0000\u0473\u0474\u0003?\u0018"+ + "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006}\u0010\u0000"+ + "\u0476\u0477\u0006}\u000b\u0000\u0477\u010a\u0001\u0000\u0000\u0000\u0478"+ + "\u0479\u0003\u00a7L\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b"+ + "\u0006~\u000e\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001\u0000"+ + "\u0000\u0000\u047d\u047e\u0007\u0007\u0000\u0000\u047e\u047f\u0007\t\u0000"+ + "\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b"+ + "\u0000\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\u0007\u0013\u0000"+ + "\u0000\u0483\u0484\u0007\u0001\u0000\u0000\u0484\u0485\u0007\u0005\u0000"+ + "\u0000\u0485\u0486\u0007\n\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000"+ + "\u0487\u0488\u0006\u0080\u001b\u0000\u0488\u0110\u0001\u0000\u0000\u0000"+ + "\u0489\u048a\b\"\u0000\u0000\u048a\u0112\u0001\u0000\u0000\u0000\u048b"+ + "\u048d\u0003\u0111\u0081\u0000\u048c\u048b\u0001\u0000\u0000\u0000\u048d"+ + "\u048e\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000\u048e"+ + "\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000\u0490"+ + "\u0491\u0003=\u0017\u0000\u0491\u0493\u0001\u0000\u0000\u0000\u0492\u048c"+ + "\u0001\u0000\u0000\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0495"+ + "\u0001\u0000\u0000\u0000\u0494\u0496\u0003\u0111\u0081\u0000\u0495\u0494"+ + "\u0001\u0000\u0000\u0000\u0496\u0497\u0001\u0000\u0000\u0000\u0497\u0495"+ + "\u0001\u0000\u0000\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0114"+ + "\u0001\u0000\u0000\u0000\u0499\u049a\u0003\u0113\u0082\u0000\u049a\u049b"+ + "\u0001\u0000\u0000\u0000\u049b\u049c\u0006\u0083\u001c\u0000\u049c\u0116"+ + "\u0001\u0000\u0000\u0000\u049d\u049e\u00037\u0014\u0000\u049e\u049f\u0001"+ + 
"\u0000\u0000\u0000\u049f\u04a0\u0006\u0084\n\u0000\u04a0\u0118\u0001\u0000"+ + "\u0000\u0000\u04a1\u04a2\u00039\u0015\u0000\u04a2\u04a3\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0006\u0085\n\u0000\u04a4\u011a\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0003;\u0016\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7"+ + "\u04a8\u0006\u0086\n\u0000\u04a8\u011c\u0001\u0000\u0000\u0000\u04a9\u04aa"+ + "\u0003?\u0018\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac\u0006"+ + "\u0087\u0010\u0000\u04ac\u04ad\u0006\u0087\u000b\u0000\u04ad\u04ae\u0006"+ + "\u0087\u000b\u0000\u04ae\u011e\u0001\u0000\u0000\u0000\u04af\u04b0\u0003"+ + "a)\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0088\u0013"+ + "\u0000\u04b2\u0120\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003e+\u0000\u04b4"+ + "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u0089\u0012\u0000\u04b6"+ + "\u0122\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003i-\u0000\u04b8\u04b9\u0001"+ + "\u0000\u0000\u0000\u04b9\u04ba\u0006\u008a\u0016\u0000\u04ba\u0124\u0001"+ + "\u0000\u0000\u0000\u04bb\u04bc\u0003\u010f\u0080\u0000\u04bc\u04bd\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0006\u008b\u001d\u0000\u04be\u0126\u0001"+ + "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ebn\u0000\u04c0\u04c1\u0001\u0000"+ + "\u0000\u0000\u04c1\u04c2\u0006\u008c\u0019\u0000\u04c2\u0128\u0001\u0000"+ + "\u0000\u0000\u04c3\u04c4\u0003\u00afP\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u008d\u001e\u0000\u04c6\u012a\u0001\u0000\u0000"+ + "\u0000\u04c7\u04c8\u0004\u008e\b\u0000\u04c8\u04c9\u0003\u00819\u0000"+ + "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008e\u0017\u0000"+ + "\u04cb\u012c\u0001\u0000\u0000\u0000\u04cc\u04cd\u0004\u008f\t\u0000\u04cd"+ + "\u04ce\u0003\u00a5K\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf\u04d0"+ + "\u0006\u008f\u0018\u0000\u04d0\u012e\u0001\u0000\u0000\u0000\u04d1\u04d2"+ + "\u00037\u0014\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ + 
"\u0090\n\u0000\u04d4\u0130\u0001\u0000\u0000\u0000\u04d5\u04d6\u00039"+ + "\u0015\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0091"+ + "\n\u0000\u04d8\u0132\u0001\u0000\u0000\u0000\u04d9\u04da\u0003;\u0016"+ + "\u0000\u04da\u04db\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0092\n\u0000"+ + "\u04dc\u0134\u0001\u0000\u0000\u0000\u04dd\u04de\u0003?\u0018\u0000\u04de"+ + "\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u0010\u0000\u04e0"+ + "\u04e1\u0006\u0093\u000b\u0000\u04e1\u0136\u0001\u0000\u0000\u0000\u04e2"+ + "\u04e3\u0003i-\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006"+ + "\u0094\u0016\u0000\u04e5\u0138\u0001\u0000\u0000\u0000\u04e6\u04e7\u0004"+ + "\u0095\n\u0000\u04e7\u04e8\u0003\u00819\u0000\u04e8\u04e9\u0001\u0000"+ + "\u0000\u0000\u04e9\u04ea\u0006\u0095\u0017\u0000\u04ea\u013a\u0001\u0000"+ + "\u0000\u0000\u04eb\u04ec\u0004\u0096\u000b\u0000\u04ec\u04ed\u0003\u00a5"+ + "K\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0096\u0018"+ + "\u0000\u04ef\u013c\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003\u00afP\u0000"+ + "\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u0097\u001e\u0000"+ + "\u04f3\u013e\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003\u00abN\u0000\u04f5"+ + "\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u0098\u001f\u0000\u04f7"+ + "\u0140\u0001\u0000\u0000\u0000\u04f8\u04f9\u00037\u0014\u0000\u04f9\u04fa"+ + "\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u0099\n\u0000\u04fb\u0142\u0001"+ + "\u0000\u0000\u0000\u04fc\u04fd\u00039\u0015\u0000\u04fd\u04fe\u0001\u0000"+ + "\u0000\u0000\u04fe\u04ff\u0006\u009a\n\u0000\u04ff\u0144\u0001\u0000\u0000"+ + "\u0000\u0500\u0501\u0003;\u0016\u0000\u0501\u0502\u0001\u0000\u0000\u0000"+ + "\u0502\u0503\u0006\u009b\n\u0000\u0503\u0146\u0001\u0000\u0000\u0000\u0504"+ + "\u0505\u0003?\u0018\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507"+ + "\u0006\u009c\u0010\u0000\u0507\u0508\u0006\u009c\u000b\u0000\u0508\u0148"+ + 
"\u0001\u0000\u0000\u0000\u0509\u050a\u0007\u0001\u0000\u0000\u050a\u050b"+ + "\u0007\t\u0000\u0000\u050b\u050c\u0007\u000f\u0000\u0000\u050c\u050d\u0007"+ + "\u0007\u0000\u0000\u050d\u014a\u0001\u0000\u0000\u0000\u050e\u050f\u0003"+ + "7\u0014\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511\u0006\u009e"+ + "\n\u0000\u0511\u014c\u0001\u0000\u0000\u0000\u0512\u0513\u00039\u0015"+ + "\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u009f\n\u0000"+ + "\u0515\u014e\u0001\u0000\u0000\u0000\u0516\u0517\u0003;\u0016\u0000\u0517"+ + "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00a0\n\u0000\u0519\u0150"+ + "\u0001\u0000\u0000\u0000\u051a\u051b\u0003\u00a9M\u0000\u051b\u051c\u0001"+ + "\u0000\u0000\u0000\u051c\u051d\u0006\u00a1\u0011\u0000\u051d\u051e\u0006"+ + "\u00a1\u000b\u0000\u051e\u0152\u0001\u0000\u0000\u0000\u051f\u0520\u0003"+ + "=\u0017\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a2"+ + "\f\u0000\u0522\u0154\u0001\u0000\u0000\u0000\u0523\u0529\u0003K\u001e"+ + "\u0000\u0524\u0529\u0003A\u0019\u0000\u0525\u0529\u0003i-\u0000\u0526"+ + "\u0529\u0003C\u001a\u0000\u0527\u0529\u0003Q!\u0000\u0528\u0523\u0001"+ + "\u0000\u0000\u0000\u0528\u0524\u0001\u0000\u0000\u0000\u0528\u0525\u0001"+ + "\u0000\u0000\u0000\u0528\u0526\u0001\u0000\u0000\u0000\u0528\u0527\u0001"+ + "\u0000\u0000\u0000\u0529\u052a\u0001\u0000\u0000\u0000\u052a\u0528\u0001"+ + "\u0000\u0000\u0000\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u0156\u0001"+ + "\u0000\u0000\u0000\u052c\u052d\u00037\u0014\u0000\u052d\u052e\u0001\u0000"+ + "\u0000\u0000\u052e\u052f\u0006\u00a4\n\u0000\u052f\u0158\u0001\u0000\u0000"+ + "\u0000\u0530\u0531\u00039\u0015\u0000\u0531\u0532\u0001\u0000\u0000\u0000"+ + "\u0532\u0533\u0006\u00a5\n\u0000\u0533\u015a\u0001\u0000\u0000\u0000\u0534"+ + "\u0535\u0003;\u0016\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537"+ + "\u0006\u00a6\n\u0000\u0537\u015c\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ + 
"?\u0018\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7"+ + "\u0010\u0000\u053b\u053c\u0006\u00a7\u000b\u0000\u053c\u015e\u0001\u0000"+ + "\u0000\u0000\u053d\u053e\u0003=\u0017\u0000\u053e\u053f\u0001\u0000\u0000"+ + "\u0000\u053f\u0540\u0006\u00a8\f\u0000\u0540\u0160\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0003e+\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544"+ + "\u0006\u00a9\u0012\u0000\u0544\u0162\u0001\u0000\u0000\u0000\u0545\u0546"+ + "\u0003i-\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00aa"+ + "\u0016\u0000\u0548\u0164\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u010d"+ + "\u007f\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ab"+ + " \u0000\u054c\u054d\u0006\u00ab!\u0000\u054d\u0166\u0001\u0000\u0000\u0000"+ + "\u054e\u054f\u0003\u00d1a\u0000\u054f\u0550\u0001\u0000\u0000\u0000\u0550"+ + "\u0551\u0006\u00ac\u0014\u0000\u0551\u0168\u0001\u0000\u0000\u0000\u0552"+ + "\u0553\u0003U#\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006"+ + "\u00ad\u0015\u0000\u0555\u016a\u0001\u0000\u0000\u0000\u0556\u0557\u0003"+ + "7\u0014\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00ae"+ + "\n\u0000\u0559\u016c\u0001\u0000\u0000\u0000\u055a\u055b\u00039\u0015"+ + "\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00af\n\u0000"+ + "\u055d\u016e\u0001\u0000\u0000\u0000\u055e\u055f\u0003;\u0016\u0000\u055f"+ + "\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00b0\n\u0000\u0561\u0170"+ + "\u0001\u0000\u0000\u0000\u0562\u0563\u0003?\u0018\u0000\u0563\u0564\u0001"+ + "\u0000\u0000\u0000\u0564\u0565\u0006\u00b1\u0010\u0000\u0565\u0566\u0006"+ + "\u00b1\u000b\u0000\u0566\u0567\u0006\u00b1\u000b\u0000\u0567\u0172\u0001"+ + "\u0000\u0000\u0000\u0568\u0569\u0003e+\u0000\u0569\u056a\u0001\u0000\u0000"+ + "\u0000\u056a\u056b\u0006\u00b2\u0012\u0000\u056b\u0174\u0001\u0000\u0000"+ + "\u0000\u056c\u056d\u0003i-\u0000\u056d\u056e\u0001\u0000\u0000\u0000\u056e"+ + 
"\u056f\u0006\u00b3\u0016\u0000\u056f\u0176\u0001\u0000\u0000\u0000\u0570"+ + "\u0571\u0003\u00ebn\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572\u0573"+ + "\u0006\u00b4\u0019\u0000\u0573\u0178\u0001\u0000\u0000\u0000\u0574\u0575"+ + "\u00037\u0014\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ + "\u00b5\n\u0000\u0577\u017a\u0001\u0000\u0000\u0000\u0578\u0579\u00039"+ + "\u0015\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b6"+ + "\n\u0000\u057b\u017c\u0001\u0000\u0000\u0000\u057c\u057d\u0003;\u0016"+ + "\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00b7\n\u0000"+ + "\u057f\u017e\u0001\u0000\u0000\u0000\u0580\u0581\u0003?\u0018\u0000\u0581"+ + "\u0582\u0001\u0000\u0000\u0000\u0582\u0583\u0006\u00b8\u0010\u0000\u0583"+ + "\u0584\u0006\u00b8\u000b\u0000\u0584\u0180\u0001\u0000\u0000\u0000\u0585"+ + "\u0586\u0003\u00d1a\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ + "\u0006\u00b9\u0014\u0000\u0588\u0589\u0006\u00b9\u000b\u0000\u0589\u058a"+ + "\u0006\u00b9\"\u0000\u058a\u0182\u0001\u0000\u0000\u0000\u058b\u058c\u0003"+ + "U#\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00ba\u0015"+ + "\u0000\u058e\u058f\u0006\u00ba\u000b\u0000\u058f\u0590\u0006\u00ba\"\u0000"+ + "\u0590\u0184\u0001\u0000\u0000\u0000\u0591\u0592\u00037\u0014\u0000\u0592"+ + "\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bb\n\u0000\u0594\u0186"+ + "\u0001\u0000\u0000\u0000\u0595\u0596\u00039\u0015\u0000\u0596\u0597\u0001"+ + "\u0000\u0000\u0000\u0597\u0598\u0006\u00bc\n\u0000\u0598\u0188\u0001\u0000"+ + "\u0000\u0000\u0599\u059a\u0003;\u0016\u0000\u059a\u059b\u0001\u0000\u0000"+ + "\u0000\u059b\u059c\u0006\u00bd\n\u0000\u059c\u018a\u0001\u0000\u0000\u0000"+ + "\u059d\u059e\u0003=\u0017\u0000\u059e\u059f\u0001\u0000\u0000\u0000\u059f"+ + "\u05a0\u0006\u00be\f\u0000\u05a0\u05a1\u0006\u00be\u000b\u0000\u05a1\u05a2"+ + "\u0006\u00be\t\u0000\u05a2\u018c\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003"+ + 
"e+\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00bf\u0012"+ + "\u0000\u05a6\u05a7\u0006\u00bf\u000b\u0000\u05a7\u05a8\u0006\u00bf\t\u0000"+ + "\u05a8\u018e\u0001\u0000\u0000\u0000\u05a9\u05aa\u00037\u0014\u0000\u05aa"+ + "\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c0\n\u0000\u05ac\u0190"+ + "\u0001\u0000\u0000\u0000\u05ad\u05ae\u00039\u0015\u0000\u05ae\u05af\u0001"+ + "\u0000\u0000\u0000\u05af\u05b0\u0006\u00c1\n\u0000\u05b0\u0192\u0001\u0000"+ + "\u0000\u0000\u05b1\u05b2\u0003;\u0016\u0000\u05b2\u05b3\u0001\u0000\u0000"+ + "\u0000\u05b3\u05b4\u0006\u00c2\n\u0000\u05b4\u0194\u0001\u0000\u0000\u0000"+ + "\u05b5\u05b6\u0003\u00afP\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7"+ + "\u05b8\u0006\u00c3\u000b\u0000\u05b8\u05b9\u0006\u00c3\u0000\u0000\u05b9"+ + "\u05ba\u0006\u00c3\u001e\u0000\u05ba\u0196\u0001\u0000\u0000\u0000\u05bb"+ + "\u05bc\u0003\u00abN\u0000\u05bc\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be"+ + "\u0006\u00c4\u000b\u0000\u05be\u05bf\u0006\u00c4\u0000\u0000\u05bf\u05c0"+ + "\u0006\u00c4\u001f\u0000\u05c0\u0198\u0001\u0000\u0000\u0000\u05c1\u05c2"+ + "\u0003[&\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00c5"+ + "\u000b\u0000\u05c4\u05c5\u0006\u00c5\u0000\u0000\u05c5\u05c6\u0006\u00c5"+ + "#\u0000\u05c6\u019a\u0001\u0000\u0000\u0000\u05c7\u05c8\u0003?\u0018\u0000"+ + "\u05c8\u05c9\u0001\u0000\u0000\u0000\u05c9\u05ca\u0006\u00c6\u0010\u0000"+ + "\u05ca\u05cb\u0006\u00c6\u000b\u0000\u05cb\u019c\u0001\u0000\u0000\u0000"+ + "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ + "\u0245\u024f\u0253\u0256\u025f\u0261\u026c\u0281\u0286\u028f\u0296\u029b"+ + "\u029d\u02a8\u02b0\u02b3\u02b5\u02ba\u02bf\u02c5\u02cc\u02d1\u02d7\u02da"+ + "\u02e2\u02e6\u0369\u036e\u0375\u0377\u0387\u038c\u0391\u0393\u0399\u03e6"+ + "\u03eb\u041c\u0420\u0425\u042a\u042f\u0431\u0435\u0437\u048e\u0492\u0497"+ + "\u0528\u052a$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ + 
"\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ + "\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ + "\u0007\u0018\u0000\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007"+ + "\u0019\u0000\u0007B\u0000\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007"+ + "\u001a\u0000\u0007$\u0000\u00070\u0000\u0007@\u0000\u0007P\u0000\u0005"+ + "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ + "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001d\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b52d842e79fb2..a2b339f378f12 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -23,6 +23,7 @@ null null null null +':' '|' null null @@ -62,7 +63,6 @@ null '*' '/' '%' -'match' null null ']' @@ -103,7 +103,6 @@ null null null null -':' null null null @@ -146,6 +145,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -185,7 +185,6 @@ MINUS ASTERISK SLASH PERCENT -MATCH NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -226,7 +225,6 @@ INFO SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS -COLON SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT @@ -310,4 +308,4 @@ inlinestatsCommand atn: -[4, 1, 120, 605, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 
24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 300, 8, 15, 10, 15, 12, 15, 303, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 308, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 316, 8, 17, 10, 17, 12, 17, 319, 9, 17, 1, 17, 3, 17, 322, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 327, 8, 18, 1, 
18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 337, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 343, 8, 22, 10, 22, 12, 22, 346, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 356, 8, 24, 10, 24, 12, 24, 359, 9, 24, 1, 24, 3, 24, 362, 8, 24, 1, 24, 1, 24, 3, 24, 366, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 373, 8, 26, 1, 26, 1, 26, 3, 26, 377, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 390, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 395, 8, 29, 10, 29, 12, 29, 398, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 403, 8, 30, 10, 30, 12, 30, 406, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 421, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 436, 8, 34, 10, 34, 12, 34, 439, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 447, 8, 34, 10, 34, 12, 34, 450, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 458, 8, 34, 10, 34, 12, 34, 461, 9, 34, 1, 34, 1, 34, 3, 34, 465, 8, 34, 1, 35, 1, 35, 3, 35, 469, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 474, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 483, 8, 38, 10, 38, 12, 38, 486, 9, 38, 1, 39, 1, 39, 3, 39, 490, 8, 39, 1, 39, 1, 39, 3, 39, 494, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 506, 8, 42, 10, 42, 12, 42, 509, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 519, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 531, 8, 47, 10, 47, 12, 47, 534, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 544, 8, 50, 1, 51, 3, 51, 547, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 552, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 574, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 580, 8, 
58, 10, 58, 12, 58, 583, 9, 58, 3, 58, 585, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 590, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 603, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 25, 25, 76, 76, 1, 0, 67, 68, 2, 0, 30, 30, 34, 34, 2, 0, 37, 37, 40, 40, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 631, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 289, 1, 0, 0, 0, 26, 291, 1, 0, 0, 0, 28, 293, 1, 0, 0, 0, 30, 296, 1, 0, 0, 0, 32, 307, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 326, 1, 0, 0, 0, 38, 330, 1, 0, 0, 0, 40, 332, 1, 0, 0, 0, 42, 336, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 367, 1, 0, 0, 0, 52, 370, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 391, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 407, 1, 0, 0, 0, 64, 415, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 464, 1, 0, 0, 0, 70, 468, 1, 0, 0, 0, 72, 473, 1, 0, 0, 0, 74, 475, 1, 0, 0, 0, 76, 478, 1, 0, 0, 0, 78, 487, 1, 0, 0, 0, 80, 495, 1, 0, 0, 0, 82, 498, 1, 0, 0, 0, 84, 501, 1, 0, 0, 0, 86, 510, 1, 0, 0, 0, 88, 514, 1, 0, 0, 0, 90, 520, 1, 0, 0, 0, 92, 524, 1, 0, 0, 0, 94, 527, 1, 0, 0, 0, 96, 535, 1, 0, 0, 0, 98, 539, 1, 0, 0, 0, 100, 543, 1, 0, 0, 0, 102, 546, 1, 0, 0, 0, 104, 551, 1, 0, 0, 0, 106, 555, 1, 0, 0, 0, 108, 557, 1, 0, 0, 0, 110, 559, 1, 0, 0, 0, 112, 562, 1, 0, 0, 0, 114, 566, 1, 0, 0, 0, 116, 569, 1, 0, 0, 0, 118, 589, 1, 0, 0, 0, 120, 593, 1, 0, 0, 0, 122, 598, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 
1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 24, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 43, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 43, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 38, 0, 0, 177, 178, 5, 42, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 33, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 49, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 39, 0, 0, 190, 192, 5, 43, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 44, 0, 
0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 29, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 46, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 43, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 41, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 43, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 48, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 16, 8, 0, 227, 228, 5, 63, 0, 0, 228, 229, 3, 106, 53, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 42, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 49, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 
32, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 42, 0, 0, 274, 284, 5, 60, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 33, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 49, 0, 0, 286, 23, 1, 0, 0, 0, 287, 290, 5, 63, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 290, 25, 1, 0, 0, 0, 291, 292, 3, 64, 32, 0, 292, 27, 1, 0, 0, 0, 293, 294, 5, 12, 0, 0, 294, 295, 3, 30, 15, 0, 295, 29, 1, 0, 0, 0, 296, 301, 3, 32, 16, 0, 297, 298, 5, 33, 0, 0, 298, 300, 3, 32, 16, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 3, 58, 29, 0, 305, 306, 5, 31, 0, 0, 306, 308, 1, 0, 0, 0, 307, 304, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 310, 3, 10, 5, 0, 310, 33, 1, 0, 0, 0, 311, 312, 5, 6, 0, 0, 312, 317, 3, 36, 18, 0, 313, 314, 5, 33, 0, 0, 314, 316, 3, 36, 18, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 321, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 3, 42, 21, 0, 321, 320, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 35, 1, 0, 0, 0, 323, 324, 3, 38, 19, 0, 324, 325, 5, 104, 0, 0, 325, 327, 1, 0, 0, 0, 326, 323, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 329, 3, 40, 20, 0, 329, 37, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 39, 1, 0, 0, 0, 332, 333, 7, 2, 0, 0, 333, 41, 1, 0, 0, 0, 334, 337, 3, 44, 22, 0, 335, 337, 3, 46, 23, 0, 336, 334, 1, 0, 0, 0, 336, 335, 1, 0, 0, 0, 337, 43, 1, 0, 0, 0, 338, 339, 5, 75, 0, 0, 339, 344, 5, 76, 0, 0, 340, 341, 5, 33, 0, 0, 341, 343, 5, 76, 0, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 0, 
0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 45, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 3, 44, 22, 0, 349, 350, 5, 66, 0, 0, 350, 47, 1, 0, 0, 0, 351, 352, 5, 19, 0, 0, 352, 357, 3, 36, 18, 0, 353, 354, 5, 33, 0, 0, 354, 356, 3, 36, 18, 0, 355, 353, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 360, 362, 3, 54, 27, 0, 361, 360, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 365, 1, 0, 0, 0, 363, 364, 5, 28, 0, 0, 364, 366, 3, 30, 15, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 368, 5, 4, 0, 0, 368, 369, 3, 30, 15, 0, 369, 51, 1, 0, 0, 0, 370, 372, 5, 15, 0, 0, 371, 373, 3, 54, 27, 0, 372, 371, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 5, 28, 0, 0, 375, 377, 3, 30, 15, 0, 376, 374, 1, 0, 0, 0, 376, 377, 1, 0, 0, 0, 377, 53, 1, 0, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 33, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 389, 3, 32, 16, 0, 387, 388, 5, 16, 0, 0, 388, 390, 3, 10, 5, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 57, 1, 0, 0, 0, 391, 396, 3, 72, 36, 0, 392, 393, 5, 35, 0, 0, 393, 395, 3, 72, 36, 0, 394, 392, 1, 0, 0, 0, 395, 398, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 59, 1, 0, 0, 0, 398, 396, 1, 0, 0, 0, 399, 404, 3, 66, 33, 0, 400, 401, 5, 35, 0, 0, 401, 403, 3, 66, 33, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 61, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 60, 30, 0, 408, 409, 5, 33, 0, 0, 409, 411, 3, 60, 30, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 63, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 7, 3, 0, 0, 416, 65, 1, 0, 0, 0, 417, 421, 5, 80, 0, 0, 418, 419, 4, 33, 10, 0, 419, 421, 3, 70, 35, 0, 420, 417, 1, 0, 0, 0, 420, 418, 1, 0, 
0, 0, 421, 67, 1, 0, 0, 0, 422, 465, 5, 44, 0, 0, 423, 424, 3, 104, 52, 0, 424, 425, 5, 67, 0, 0, 425, 465, 1, 0, 0, 0, 426, 465, 3, 102, 51, 0, 427, 465, 3, 104, 52, 0, 428, 465, 3, 98, 49, 0, 429, 465, 3, 70, 35, 0, 430, 465, 3, 106, 53, 0, 431, 432, 5, 65, 0, 0, 432, 437, 3, 100, 50, 0, 433, 434, 5, 33, 0, 0, 434, 436, 3, 100, 50, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 440, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 441, 5, 66, 0, 0, 441, 465, 1, 0, 0, 0, 442, 443, 5, 65, 0, 0, 443, 448, 3, 98, 49, 0, 444, 445, 5, 33, 0, 0, 445, 447, 3, 98, 49, 0, 446, 444, 1, 0, 0, 0, 447, 450, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 448, 449, 1, 0, 0, 0, 449, 451, 1, 0, 0, 0, 450, 448, 1, 0, 0, 0, 451, 452, 5, 66, 0, 0, 452, 465, 1, 0, 0, 0, 453, 454, 5, 65, 0, 0, 454, 459, 3, 106, 53, 0, 455, 456, 5, 33, 0, 0, 456, 458, 3, 106, 53, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 462, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 5, 66, 0, 0, 463, 465, 1, 0, 0, 0, 464, 422, 1, 0, 0, 0, 464, 423, 1, 0, 0, 0, 464, 426, 1, 0, 0, 0, 464, 427, 1, 0, 0, 0, 464, 428, 1, 0, 0, 0, 464, 429, 1, 0, 0, 0, 464, 430, 1, 0, 0, 0, 464, 431, 1, 0, 0, 0, 464, 442, 1, 0, 0, 0, 464, 453, 1, 0, 0, 0, 465, 69, 1, 0, 0, 0, 466, 469, 5, 47, 0, 0, 467, 469, 5, 64, 0, 0, 468, 466, 1, 0, 0, 0, 468, 467, 1, 0, 0, 0, 469, 71, 1, 0, 0, 0, 470, 474, 3, 64, 32, 0, 471, 472, 4, 36, 11, 0, 472, 474, 3, 70, 35, 0, 473, 470, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 73, 1, 0, 0, 0, 475, 476, 5, 9, 0, 0, 476, 477, 5, 26, 0, 0, 477, 75, 1, 0, 0, 0, 478, 479, 5, 14, 0, 0, 479, 484, 3, 78, 39, 0, 480, 481, 5, 33, 0, 0, 481, 483, 3, 78, 39, 0, 482, 480, 1, 0, 0, 0, 483, 486, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 77, 1, 0, 0, 0, 486, 484, 1, 0, 0, 0, 487, 489, 3, 10, 5, 0, 488, 490, 7, 4, 0, 0, 489, 488, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 492, 5, 45, 0, 0, 492, 494, 7, 5, 0, 0, 
493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 79, 1, 0, 0, 0, 495, 496, 5, 8, 0, 0, 496, 497, 3, 62, 31, 0, 497, 81, 1, 0, 0, 0, 498, 499, 5, 2, 0, 0, 499, 500, 3, 62, 31, 0, 500, 83, 1, 0, 0, 0, 501, 502, 5, 11, 0, 0, 502, 507, 3, 86, 43, 0, 503, 504, 5, 33, 0, 0, 504, 506, 3, 86, 43, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 85, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 3, 60, 30, 0, 511, 512, 5, 84, 0, 0, 512, 513, 3, 60, 30, 0, 513, 87, 1, 0, 0, 0, 514, 515, 5, 1, 0, 0, 515, 516, 3, 20, 10, 0, 516, 518, 3, 106, 53, 0, 517, 519, 3, 94, 47, 0, 518, 517, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 89, 1, 0, 0, 0, 520, 521, 5, 7, 0, 0, 521, 522, 3, 20, 10, 0, 522, 523, 3, 106, 53, 0, 523, 91, 1, 0, 0, 0, 524, 525, 5, 10, 0, 0, 525, 526, 3, 58, 29, 0, 526, 93, 1, 0, 0, 0, 527, 532, 3, 96, 48, 0, 528, 529, 5, 33, 0, 0, 529, 531, 3, 96, 48, 0, 530, 528, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 95, 1, 0, 0, 0, 534, 532, 1, 0, 0, 0, 535, 536, 3, 64, 32, 0, 536, 537, 5, 31, 0, 0, 537, 538, 3, 68, 34, 0, 538, 97, 1, 0, 0, 0, 539, 540, 7, 6, 0, 0, 540, 99, 1, 0, 0, 0, 541, 544, 3, 102, 51, 0, 542, 544, 3, 104, 52, 0, 543, 541, 1, 0, 0, 0, 543, 542, 1, 0, 0, 0, 544, 101, 1, 0, 0, 0, 545, 547, 7, 0, 0, 0, 546, 545, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 5, 27, 0, 0, 549, 103, 1, 0, 0, 0, 550, 552, 7, 0, 0, 0, 551, 550, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 105, 1, 0, 0, 0, 555, 556, 5, 25, 0, 0, 556, 107, 1, 0, 0, 0, 557, 558, 7, 7, 0, 0, 558, 109, 1, 0, 0, 0, 559, 560, 5, 5, 0, 0, 560, 561, 3, 112, 56, 0, 561, 111, 1, 0, 0, 0, 562, 563, 5, 65, 0, 0, 563, 564, 3, 2, 1, 0, 564, 565, 5, 66, 0, 0, 565, 113, 1, 0, 0, 0, 566, 567, 5, 13, 0, 0, 567, 568, 5, 100, 0, 0, 568, 115, 1, 0, 0, 0, 569, 570, 5, 3, 0, 0, 570, 573, 5, 90, 0, 0, 571, 572, 5, 88, 0, 0, 572, 574, 3, 60, 30, 0, 573, 571, 1, 0, 0, 0, 573, 574, 1, 
0, 0, 0, 574, 584, 1, 0, 0, 0, 575, 576, 5, 89, 0, 0, 576, 581, 3, 118, 59, 0, 577, 578, 5, 33, 0, 0, 578, 580, 3, 118, 59, 0, 579, 577, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 585, 1, 0, 0, 0, 583, 581, 1, 0, 0, 0, 584, 575, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 117, 1, 0, 0, 0, 586, 587, 3, 60, 30, 0, 587, 588, 5, 31, 0, 0, 588, 590, 1, 0, 0, 0, 589, 586, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 592, 3, 60, 30, 0, 592, 119, 1, 0, 0, 0, 593, 594, 5, 18, 0, 0, 594, 595, 3, 36, 18, 0, 595, 596, 5, 88, 0, 0, 596, 597, 3, 62, 31, 0, 597, 121, 1, 0, 0, 0, 598, 599, 5, 17, 0, 0, 599, 602, 3, 54, 27, 0, 600, 601, 5, 28, 0, 0, 601, 603, 3, 30, 15, 0, 602, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 123, 1, 0, 0, 0, 59, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 289, 301, 307, 317, 321, 326, 336, 344, 357, 361, 365, 372, 376, 383, 389, 396, 404, 412, 420, 437, 448, 459, 464, 468, 473, 484, 489, 493, 507, 518, 532, 543, 546, 551, 573, 581, 584, 589, 602] \ No newline at end of file +[4, 1, 119, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 298, 8, 15, 10, 15, 12, 15, 301, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 314, 8, 17, 10, 17, 12, 17, 317, 9, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 335, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 341, 8, 22, 10, 22, 12, 22, 344, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 354, 8, 24, 10, 24, 12, 24, 357, 9, 24, 1, 24, 3, 24, 360, 8, 24, 1, 24, 1, 24, 3, 24, 364, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 371, 8, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 380, 8, 27, 10, 27, 12, 27, 383, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 388, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 393, 8, 29, 10, 29, 12, 29, 396, 
9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 409, 8, 31, 10, 31, 12, 31, 412, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 419, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 434, 8, 34, 10, 34, 12, 34, 437, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 445, 8, 34, 10, 34, 12, 34, 448, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 456, 8, 34, 10, 34, 12, 34, 459, 9, 34, 1, 34, 1, 34, 3, 34, 463, 8, 34, 1, 35, 1, 35, 3, 35, 467, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 
2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 628, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 287, 1, 0, 0, 0, 26, 289, 1, 0, 0, 0, 28, 291, 1, 0, 0, 0, 30, 294, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 309, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 328, 1, 0, 0, 0, 40, 330, 1, 0, 0, 0, 42, 334, 1, 0, 0, 0, 44, 336, 1, 0, 0, 0, 46, 345, 1, 0, 0, 0, 48, 349, 1, 0, 0, 0, 50, 365, 1, 0, 0, 0, 52, 368, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 384, 1, 0, 0, 0, 58, 389, 1, 0, 0, 0, 60, 397, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 413, 1, 0, 0, 0, 66, 418, 1, 0, 0, 0, 68, 462, 1, 0, 0, 0, 70, 466, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 25, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 
25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 44, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 44, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 39, 0, 0, 177, 178, 5, 43, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 34, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 50, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 40, 0, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 45, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 30, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 47, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 44, 0, 0, 212, 
211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 42, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 44, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 49, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 58, 29, 0, 227, 228, 5, 24, 0, 0, 228, 229, 3, 68, 34, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 43, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 50, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 33, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 43, 0, 0, 274, 284, 5, 61, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 34, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 50, 0, 0, 286, 23, 1, 0, 0, 0, 
287, 288, 3, 72, 36, 0, 288, 25, 1, 0, 0, 0, 289, 290, 3, 64, 32, 0, 290, 27, 1, 0, 0, 0, 291, 292, 5, 12, 0, 0, 292, 293, 3, 30, 15, 0, 293, 29, 1, 0, 0, 0, 294, 299, 3, 32, 16, 0, 295, 296, 5, 34, 0, 0, 296, 298, 3, 32, 16, 0, 297, 295, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 31, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 302, 303, 3, 58, 29, 0, 303, 304, 5, 32, 0, 0, 304, 306, 1, 0, 0, 0, 305, 302, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 3, 10, 5, 0, 308, 33, 1, 0, 0, 0, 309, 310, 5, 6, 0, 0, 310, 315, 3, 36, 18, 0, 311, 312, 5, 34, 0, 0, 312, 314, 3, 36, 18, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 320, 3, 42, 21, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 3, 38, 19, 0, 322, 323, 5, 24, 0, 0, 323, 325, 1, 0, 0, 0, 324, 321, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 327, 3, 40, 20, 0, 327, 37, 1, 0, 0, 0, 328, 329, 5, 76, 0, 0, 329, 39, 1, 0, 0, 0, 330, 331, 7, 2, 0, 0, 331, 41, 1, 0, 0, 0, 332, 335, 3, 44, 22, 0, 333, 335, 3, 46, 23, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, 0, 335, 43, 1, 0, 0, 0, 336, 337, 5, 75, 0, 0, 337, 342, 5, 76, 0, 0, 338, 339, 5, 34, 0, 0, 339, 341, 5, 76, 0, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 45, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 65, 0, 0, 346, 347, 3, 44, 22, 0, 347, 348, 5, 66, 0, 0, 348, 47, 1, 0, 0, 0, 349, 350, 5, 19, 0, 0, 350, 355, 3, 36, 18, 0, 351, 352, 5, 34, 0, 0, 352, 354, 3, 36, 18, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 360, 3, 54, 27, 0, 359, 358, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 363, 1, 0, 0, 0, 361, 362, 5, 29, 0, 0, 362, 364, 3, 30, 15, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 49, 1, 0, 0, 0, 365, 366, 5, 4, 0, 0, 
366, 367, 3, 30, 15, 0, 367, 51, 1, 0, 0, 0, 368, 370, 5, 15, 0, 0, 369, 371, 3, 54, 27, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 29, 0, 0, 373, 375, 3, 30, 15, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 381, 3, 56, 28, 0, 377, 378, 5, 34, 0, 0, 378, 380, 3, 56, 28, 0, 379, 377, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 55, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 384, 387, 3, 32, 16, 0, 385, 386, 5, 16, 0, 0, 386, 388, 3, 10, 5, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 394, 3, 72, 36, 0, 390, 391, 5, 36, 0, 0, 391, 393, 3, 72, 36, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 402, 3, 66, 33, 0, 398, 399, 5, 36, 0, 0, 399, 401, 3, 66, 33, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 410, 3, 60, 30, 0, 406, 407, 5, 34, 0, 0, 407, 409, 3, 60, 30, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 63, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 7, 3, 0, 0, 414, 65, 1, 0, 0, 0, 415, 419, 5, 80, 0, 0, 416, 417, 4, 33, 10, 0, 417, 419, 3, 70, 35, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 419, 67, 1, 0, 0, 0, 420, 463, 5, 45, 0, 0, 421, 422, 3, 104, 52, 0, 422, 423, 5, 67, 0, 0, 423, 463, 1, 0, 0, 0, 424, 463, 3, 102, 51, 0, 425, 463, 3, 104, 52, 0, 426, 463, 3, 98, 49, 0, 427, 463, 3, 70, 35, 0, 428, 463, 3, 106, 53, 0, 429, 430, 5, 65, 0, 0, 430, 435, 3, 100, 50, 0, 431, 432, 5, 34, 0, 0, 432, 434, 3, 100, 50, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 438, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 66, 0, 0, 439, 463, 1, 0, 0, 0, 440, 441, 5, 65, 0, 0, 441, 446, 3, 98, 49, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 98, 49, 0, 444, 442, 1, 0, 0, 0, 
445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 449, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 5, 66, 0, 0, 450, 463, 1, 0, 0, 0, 451, 452, 5, 65, 0, 0, 452, 457, 3, 106, 53, 0, 453, 454, 5, 34, 0, 0, 454, 456, 3, 106, 53, 0, 455, 453, 1, 0, 0, 0, 456, 459, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 460, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 460, 461, 5, 66, 0, 0, 461, 463, 1, 0, 0, 0, 462, 420, 1, 0, 0, 0, 462, 421, 1, 0, 0, 0, 462, 424, 1, 0, 0, 0, 462, 425, 1, 0, 0, 0, 462, 426, 1, 0, 0, 0, 462, 427, 1, 0, 0, 0, 462, 428, 1, 0, 0, 0, 462, 429, 1, 0, 0, 0, 462, 440, 1, 0, 0, 0, 462, 451, 1, 0, 0, 0, 463, 69, 1, 0, 0, 0, 464, 467, 5, 48, 0, 0, 465, 467, 5, 64, 0, 0, 466, 464, 1, 0, 0, 0, 466, 465, 1, 0, 0, 0, 467, 71, 1, 0, 0, 0, 468, 472, 3, 64, 32, 0, 469, 470, 4, 36, 11, 0, 470, 472, 3, 70, 35, 0, 471, 468, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 475, 5, 27, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 34, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 46, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 34, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 
516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 34, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 32, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 28, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 27, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 34, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 32, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 
5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 29, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 58, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 299, 305, 315, 319, 324, 334, 342, 355, 359, 363, 370, 374, 381, 387, 394, 402, 410, 418, 435, 446, 457, 462, 466, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 9f8197830640c..e36184b1f07da 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,14 +8,26 @@ * 2.0. */ -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.FailedPredicateException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.ParserATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.misc.*; -import org.antlr.v4.runtime.tree.*; +import org.antlr.v4.runtime.tree.ParseTreeListener; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; + import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; 
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) public class EsqlBaseParser extends ParserConfig { @@ -25,67 +37,67 @@ public class EsqlBaseParser extends ParserConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, PIPE=24, QUOTED_STRING=25, - INTEGER_LITERAL=26, DECIMAL_LITERAL=27, BY=28, AND=29, ASC=30, ASSIGN=31, - CAST_OP=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, IN=38, IS=39, - LAST=40, LIKE=41, LP=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, RLIKE=48, - RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, - PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, MATCH=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108, LOOKUP_LINE_COMMENT=109, 
LOOKUP_MULTILINE_COMMENT=110, - LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, - LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, - METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, - CLOSING_METRICS_WS=120; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, + LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, + INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, + CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, + LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, + RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, + PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, + SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, + 
LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, + LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, + METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, + CLOSING_METRICS_WS=119; public static final int - RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, - RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, - RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, - RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, - RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, - RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, - RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, - RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, - RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, - RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, - RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, - RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, - RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, - RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, - RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, - RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, - RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, + RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, + RULE_whereCommand = 4, 
RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, + RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, + RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, + RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, + RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, + RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, + RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, + RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, + RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, + RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, + RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, + RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, + RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, + RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, + RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, RULE_inlinestatsCommand = 61; private static String[] makeRuleNames() { return new String[] { - "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", - "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", 
"statsCommand", - "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", - "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", + "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", + "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", + "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", + "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", + "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand" }; } @@ -93,46 +105,45 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - 
"'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", "','", - "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", - "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", - "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'match'", null, null, "']'", null, null, - null, null, null, null, null, null, "'metadata'", null, null, null, null, - null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, - null, null, null, null, null, null, null, null, null, "'info'", null, - null, null, "':'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", + "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", + "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", 
"CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "MATCH", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", + "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", + "CIEQ", "NEQ", 
"LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -244,7 +255,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -340,7 +351,7 @@ private QueryContext query(int _p) throws RecognitionException { setState(132); processingCommand(); } - } + } } setState(137); _errHandler.sync(this); @@ -695,7 +706,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public 
BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -1039,7 +1050,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc } break; } - } + } } setState(209); _errHandler.sync(this); @@ -1156,13 +1167,14 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog @SuppressWarnings("CheckReturnValue") public static class MatchBooleanExpressionContext extends ParserRuleContext { - public StringContext queryString; - public ValueExpressionContext valueExpression() { - return getRuleContext(ValueExpressionContext.class,0); + public QualifiedNameContext fieldExp; + public ConstantContext queryString; + public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); } - public TerminalNode MATCH() { return getToken(EsqlBaseParser.MATCH, 0); } - public StringContext string() { - return getRuleContext(StringContext.class,0); + public ConstantContext constant() { + return getRuleContext(ConstantContext.class,0); } @SuppressWarnings("this-escape") public MatchBooleanExpressionContext(ParserRuleContext parent, int invokingState) { @@ -1191,11 +1203,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog enterOuterAlt(_localctx, 1); { setState(226); - valueExpression(); + ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); setState(227); - match(MATCH); + match(COLON); setState(228); - ((MatchBooleanExpressionContext)_localctx).queryString = string(); + ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } catch (RecognitionException re) { @@ -1216,7 +1228,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } 
public void copyFrom(ValueExpressionContext ctx) { @@ -1321,7 +1333,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void copyFrom(OperatorExpressionContext ctx) { @@ -1478,7 +1490,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE setState(244); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1513,7 +1525,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE } break; } - } + } } setState(253); _errHandler.sync(this); @@ -1539,7 +1551,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1737,7 +1749,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc setState(266); dataType(); } - } + } } setState(271); _errHandler.sync(this); @@ -1856,7 +1868,6 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx @SuppressWarnings("CheckReturnValue") public static class FunctionNameContext extends ParserRuleContext { - public TerminalNode MATCH() { return getToken(EsqlBaseParser.MATCH, 0); } public IdentifierOrParameterContext identifierOrParameter() { return getRuleContext(IdentifierOrParameterContext.class,0); } @@ -1884,23 +1895,10 @@ public final FunctionNameContext 
functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); enterRule(_localctx, 24, RULE_functionName); try { - setState(289); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(287); - match(MATCH); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(288); - identifierOrParameter(); - } - break; + enterOuterAlt(_localctx, 1); + { + setState(287); + identifierOrParameter(); } } catch (RecognitionException re) { @@ -1921,7 +1919,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1957,7 +1955,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(291); + setState(289); identifier(); } } @@ -2004,9 +2002,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(293); + setState(291); match(ROW); - setState(294); + setState(292); fields(); } } @@ -2060,25 +2058,25 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(296); + setState(294); field(); - setState(301); + setState(299); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(297); + setState(295); match(COMMA); - setState(298); + setState(296); field(); } - } + } } - setState(303); + setState(301); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); 
+ _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } } } @@ -2128,19 +2126,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(305); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(304); + setState(302); qualifiedName(); - setState(305); + setState(303); match(ASSIGN); } break; } - setState(309); + setState(307); booleanExpression(0); } } @@ -2198,34 +2196,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(311); + setState(309); match(FROM); - setState(312); + setState(310); indexPattern(); - setState(317); + setState(315); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(313); + setState(311); match(COMMA); - setState(314); + setState(312); indexPattern(); } - } + } } - setState(319); + setState(317); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(321); + setState(319); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(320); + setState(318); metadata(); } break; @@ -2278,19 +2276,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(326); + setState(324); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(323); + setState(321); clusterString(); - 
setState(324); + setState(322); match(COLON); } break; } - setState(328); + setState(326); indexString(); } } @@ -2334,7 +2332,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(328); match(UNQUOTED_SOURCE); } } @@ -2380,7 +2378,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(332); + setState(330); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2435,20 +2433,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(336); + setState(334); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(334); + setState(332); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(335); + setState(333); deprecated_metadata(); } break; @@ -2505,27 +2503,27 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(338); + setState(336); match(METADATA); - setState(339); + setState(337); match(UNQUOTED_SOURCE); - setState(344); + setState(342); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(340); + setState(338); match(COMMA); - setState(341); + setState(339); match(UNQUOTED_SOURCE); } - } + } } - setState(346); + setState(344); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -2572,11 +2570,11 @@ public final Deprecated_metadataContext 
deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(347); + setState(345); match(OPENING_BRACKET); - setState(348); + setState(346); metadataOption(); - setState(349); + setState(347); match(CLOSING_BRACKET); } } @@ -2640,46 +2638,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(351); + setState(349); match(DEV_METRICS); - setState(352); + setState(350); indexPattern(); - setState(357); + setState(355); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(353); + setState(351); match(COMMA); - setState(354); + setState(352); indexPattern(); } - } + } } - setState(359); + setState(357); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(361); + setState(359); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(360); + setState(358); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(365); + setState(363); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(363); + setState(361); match(BY); - setState(364); + setState(362); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2729,9 +2727,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(367); + setState(365); match(EVAL); - setState(368); + setState(366); fields(); } } @@ -2784,26 +2782,26 @@ public final StatsCommandContext 
statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(370); + setState(368); match(STATS); - setState(372); + setState(370); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(371); + setState(369); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(376); + setState(374); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(374); + setState(372); match(BY); - setState(375); + setState(373); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2860,25 +2858,25 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(378); + setState(376); aggField(); - setState(383); + setState(381); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(379); + setState(377); match(COMMA); - setState(380); + setState(378); aggField(); } - } + } } - setState(385); + setState(383); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } } } @@ -2928,16 +2926,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(386); + setState(384); field(); - setState(389); + setState(387); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(387); + setState(385); match(WHERE); - setState(388); + setState(386); booleanExpression(0); } break; @@ 
-2994,25 +2992,25 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(391); + setState(389); identifierOrParameter(); - setState(396); + setState(394); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(392); + setState(390); match(DOT); - setState(393); + setState(391); identifierOrParameter(); } - } + } } - setState(398); + setState(396); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -3066,25 +3064,25 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(399); + setState(397); identifierPattern(); - setState(404); + setState(402); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(400); + setState(398); match(DOT); - setState(401); + setState(399); identifierPattern(); } - } + } } - setState(406); + setState(404); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -3138,25 +3136,25 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(407); + setState(405); qualifiedNamePattern(); - setState(412); + setState(410); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) 
{ if ( _alt==1 ) { { { - setState(408); + setState(406); match(COMMA); - setState(409); + setState(407); qualifiedNamePattern(); } - } + } } - setState(414); + setState(412); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } } @@ -3202,7 +3200,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(415); + setState(413); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3255,22 +3253,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(420); + setState(418); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(417); + setState(415); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(418); + setState(416); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(419); + setState(417); parameter(); } break; @@ -3294,7 +3292,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + @SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3543,14 +3541,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(464); + setState(462); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 
1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(422); + setState(420); match(NULL); } break; @@ -3558,9 +3556,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(423); + setState(421); integerValue(); - setState(424); + setState(422); match(UNQUOTED_IDENTIFIER); } break; @@ -3568,7 +3566,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(426); + setState(424); decimalValue(); } break; @@ -3576,7 +3574,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(427); + setState(425); integerValue(); } break; @@ -3584,7 +3582,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(428); + setState(426); booleanValue(); } break; @@ -3592,7 +3590,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(429); + setState(427); parameter(); } break; @@ -3600,7 +3598,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(430); + setState(428); string(); } break; @@ -3608,27 +3606,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(431); + setState(429); match(OPENING_BRACKET); - setState(432); + setState(430); numericValue(); - setState(437); + setState(435); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(433); + 
setState(431); match(COMMA); - setState(434); + setState(432); numericValue(); } } - setState(439); + setState(437); _errHandler.sync(this); _la = _input.LA(1); } - setState(440); + setState(438); match(CLOSING_BRACKET); } break; @@ -3636,27 +3634,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(442); + setState(440); match(OPENING_BRACKET); - setState(443); + setState(441); booleanValue(); - setState(448); + setState(446); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(444); + setState(442); match(COMMA); - setState(445); + setState(443); booleanValue(); } } - setState(450); + setState(448); _errHandler.sync(this); _la = _input.LA(1); } - setState(451); + setState(449); match(CLOSING_BRACKET); } break; @@ -3664,27 +3662,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(453); + setState(451); match(OPENING_BRACKET); - setState(454); + setState(452); string(); - setState(459); + setState(457); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(455); + setState(453); match(COMMA); - setState(456); + setState(454); string(); } } - setState(461); + setState(459); _errHandler.sync(this); _la = _input.LA(1); } - setState(462); + setState(460); match(CLOSING_BRACKET); } break; @@ -3708,7 +3706,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3758,14 +3756,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, 
RULE_parameter); try { - setState(468); + setState(466); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(466); + setState(464); match(PARAM); } break; @@ -3773,7 +3771,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(467); + setState(465); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3824,22 +3822,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(473); + setState(471); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(470); + setState(468); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(471); + setState(469); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(472); + setState(470); parameter(); } break; @@ -3886,9 +3884,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(475); + setState(473); match(LIMIT); - setState(476); + setState(474); match(INTEGER_LITERAL); } } @@ -3943,27 +3941,27 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(478); + setState(476); match(SORT); - setState(479); + setState(477); orderExpression(); - setState(484); + setState(482); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(480); + setState(478); match(COMMA); - setState(481); + setState(479); orderExpression(); } - } + } } - setState(486); + setState(484); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -4017,14 +4015,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(487); + setState(485); booleanExpression(0); - setState(489); + setState(487); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(488); + setState(486); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4038,14 +4036,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(493); + setState(491); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(491); + setState(489); match(NULLS); - setState(492); + setState(490); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4104,9 +4102,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(493); match(KEEP); - setState(496); + setState(494); qualifiedNamePatterns(); } } @@ -4153,9 +4151,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(496); match(DROP); - setState(499); + setState(497); qualifiedNamePatterns(); } } @@ -4210,27 +4208,27 @@ public final RenameCommandContext renameCommand() 
throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(501); + setState(499); match(RENAME); - setState(502); + setState(500); renameClause(); - setState(507); + setState(505); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(503); + setState(501); match(COMMA); - setState(504); + setState(502); renameClause(); } - } + } } - setState(509); + setState(507); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } } } @@ -4282,11 +4280,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(510); + setState(508); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(511); + setState(509); match(AS); - setState(512); + setState(510); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4339,18 +4337,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(514); + setState(512); match(DISSECT); - setState(515); + setState(513); primaryExpression(0); - setState(516); + setState(514); string(); - setState(518); + setState(516); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(517); + setState(515); commandOptions(); } break; @@ -4403,11 +4401,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(520); + setState(518); match(GROK); - setState(521); + setState(519); primaryExpression(0); - setState(522); + setState(520); string(); } } @@ -4454,9 +4452,9 @@ public final 
MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(524); + setState(522); match(MV_EXPAND); - setState(525); + setState(523); qualifiedName(); } } @@ -4510,25 +4508,25 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(527); + setState(525); commandOption(); - setState(532); + setState(530); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(528); + setState(526); match(COMMA); - setState(529); + setState(527); commandOption(); } - } + } } - setState(534); + setState(532); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } } } @@ -4578,11 +4576,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(535); + setState(533); identifier(); - setState(536); + setState(534); match(ASSIGN); - setState(537); + setState(535); constant(); } } @@ -4628,7 +4626,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(539); + setState(537); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4683,20 +4681,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(543); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(541); + 
setState(539); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(542); + setState(540); integerValue(); } break; @@ -4745,12 +4743,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(546); + setState(544); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(545); + setState(543); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4763,7 +4761,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(548); + setState(546); match(DECIMAL_LITERAL); } } @@ -4810,12 +4808,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(551); + setState(549); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(550); + setState(548); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4828,7 +4826,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(553); + setState(551); match(INTEGER_LITERAL); } } @@ -4872,7 +4870,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(555); + setState(553); match(QUOTED_STRING); } } @@ -4922,9 +4920,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(555); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4977,9 +4975,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(559); + setState(557); match(EXPLAIN); - 
setState(560); + setState(558); subqueryExpression(); } } @@ -5027,11 +5025,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(562); + setState(560); match(OPENING_BRACKET); - setState(563); + setState(561); query(0); - setState(564); + setState(562); match(CLOSING_BRACKET); } } @@ -5053,7 +5051,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -5088,9 +5086,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(566); + setState(564); match(SHOW); - setState(567); + setState(565); match(INFO); } } @@ -5153,48 +5151,48 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(569); + setState(567); match(ENRICH); - setState(570); + setState(568); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(573); + setState(571); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(571); + setState(569); match(ON); - setState(572); + setState(570); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(584); + setState(582); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(575); + setState(573); match(WITH); - setState(576); + setState(574); enrichWithClause(); - setState(581); + setState(579); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,55,_ctx); + _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(577); + setState(575); match(COMMA); - setState(578); + setState(576); enrichWithClause(); } - } + } } - setState(583); + setState(581); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,55,_ctx); + _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } } break; @@ -5249,19 +5247,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(589); + setState(587); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(586); + setState(584); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(587); + setState(585); match(ASSIGN); } break; } - setState(591); + setState(589); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5314,13 +5312,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(593); + setState(591); match(DEV_LOOKUP); - setState(594); + setState(592); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(595); + setState(593); match(ON); - setState(596); + setState(594); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5373,18 +5371,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(598); + setState(596); match(DEV_INLINESTATS); - setState(599); + setState(597); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(602); + setState(600); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { + switch ( 
getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(600); + setState(598); match(BY); - setState(601); + setState(599); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5489,7 +5487,7 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca } public static final String _serializedATN = - "\u0004\u0001x\u025d\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001w\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5530,347 +5528,346 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\n\u010c\b\n\n\n\f\n\u010f\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0117\b\u000b\n\u000b\f\u000b"+ "\u011a\t\u000b\u0003\u000b\u011c\b\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\f\u0001\f\u0003\f\u0122\b\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u012c\b\u000f\n"+ - "\u000f\f\u000f\u012f\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003"+ - "\u0010\u0134\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0005\u0011\u013c\b\u0011\n\u0011\f\u0011\u013f\t\u0011"+ - "\u0001\u0011\u0003\u0011\u0142\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0003\u0012\u0147\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0151\b\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0157\b\u0016"+ - "\n\u0016\f\u0016\u015a\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0164"+ - 
"\b\u0018\n\u0018\f\u0018\u0167\t\u0018\u0001\u0018\u0003\u0018\u016a\b"+ - "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u016e\b\u0018\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u0175\b\u001a\u0001"+ - "\u001a\u0001\u001a\u0003\u001a\u0179\b\u001a\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0003\u001c\u0186\b\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u018b\b\u001d\n\u001d\f\u001d\u018e\t\u001d\u0001"+ - "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0193\b\u001e\n\u001e\f\u001e"+ - "\u0196\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u019b\b"+ - "\u001f\n\u001f\f\u001f\u019e\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ - "!\u0003!\u01a5\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b4\b\"\n"+ - "\"\f\"\u01b7\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01bf\b\"\n\"\f\"\u01c2\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0005\"\u01ca\b\"\n\"\f\"\u01cd\t\"\u0001\"\u0001\"\u0003\"\u01d1"+ - "\b\"\u0001#\u0001#\u0003#\u01d5\b#\u0001$\u0001$\u0001$\u0003$\u01da\b"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01e3\b&\n&"+ - "\f&\u01e6\t&\u0001\'\u0001\'\u0003\'\u01ea\b\'\u0001\'\u0001\'\u0003\'"+ - "\u01ee\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0005*\u01fa\b*\n*\f*\u01fd\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0003,\u0207\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u0213\b/\n/\f/\u0216\t/\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0220\b2\u0001"+ - "3\u00033\u0223\b3\u00013\u00013\u00014\u00034\u0228\b4\u00014\u00014\u0001"+ - "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ - 
"8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u023e\b:\u0001"+ - ":\u0001:\u0001:\u0001:\u0005:\u0244\b:\n:\f:\u0247\t:\u0003:\u0249\b:"+ - "\u0001;\u0001;\u0001;\u0003;\u024e\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u025b\b=\u0001=\u0000"+ - "\u0004\u0002\n\u0012\u0014>\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001\u0000:;\u0001\u0000<>\u0002\u0000"+ - "\u0019\u0019LL\u0001\u0000CD\u0002\u0000\u001e\u001e\"\"\u0002\u0000%"+ - "%((\u0002\u0000$$22\u0002\u00003359\u0277\u0000|\u0001\u0000\u0000\u0000"+ - "\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090\u0001\u0000\u0000\u0000"+ - "\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001\u0000\u0000\u0000\n"+ - "\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000\u0000\u0000\u000e\u00e2"+ - "\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000\u0000\u0000\u0012\u00f1"+ - "\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000\u0000\u0000\u0016\u0110"+ - "\u0001\u0000\u0000\u0000\u0018\u0121\u0001\u0000\u0000\u0000\u001a\u0123"+ - "\u0001\u0000\u0000\u0000\u001c\u0125\u0001\u0000\u0000\u0000\u001e\u0128"+ - "\u0001\u0000\u0000\u0000 \u0133\u0001\u0000\u0000\u0000\"\u0137\u0001"+ - "\u0000\u0000\u0000$\u0146\u0001\u0000\u0000\u0000&\u014a\u0001\u0000\u0000"+ - "\u0000(\u014c\u0001\u0000\u0000\u0000*\u0150\u0001\u0000\u0000\u0000,"+ - "\u0152\u0001\u0000\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015f\u0001"+ - "\u0000\u0000\u00002\u016f\u0001\u0000\u0000\u00004\u0172\u0001\u0000\u0000"+ - "\u00006\u017a\u0001\u0000\u0000\u00008\u0182\u0001\u0000\u0000\u0000:"+ - "\u0187\u0001\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0197\u0001"+ - "\u0000\u0000\u0000@\u019f\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000"+ - "\u0000D\u01d0\u0001\u0000\u0000\u0000F\u01d4\u0001\u0000\u0000\u0000H"+ - 
"\u01d9\u0001\u0000\u0000\u0000J\u01db\u0001\u0000\u0000\u0000L\u01de\u0001"+ - "\u0000\u0000\u0000N\u01e7\u0001\u0000\u0000\u0000P\u01ef\u0001\u0000\u0000"+ - "\u0000R\u01f2\u0001\u0000\u0000\u0000T\u01f5\u0001\u0000\u0000\u0000V"+ - "\u01fe\u0001\u0000\u0000\u0000X\u0202\u0001\u0000\u0000\u0000Z\u0208\u0001"+ - "\u0000\u0000\u0000\\\u020c\u0001\u0000\u0000\u0000^\u020f\u0001\u0000"+ - "\u0000\u0000`\u0217\u0001\u0000\u0000\u0000b\u021b\u0001\u0000\u0000\u0000"+ - "d\u021f\u0001\u0000\u0000\u0000f\u0222\u0001\u0000\u0000\u0000h\u0227"+ - "\u0001\u0000\u0000\u0000j\u022b\u0001\u0000\u0000\u0000l\u022d\u0001\u0000"+ - "\u0000\u0000n\u022f\u0001\u0000\u0000\u0000p\u0232\u0001\u0000\u0000\u0000"+ - "r\u0236\u0001\u0000\u0000\u0000t\u0239\u0001\u0000\u0000\u0000v\u024d"+ - "\u0001\u0000\u0000\u0000x\u0251\u0001\u0000\u0000\u0000z\u0256\u0001\u0000"+ - "\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005\u0000\u0000\u0001~\u0001"+ - "\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001\uffff\uffff\u0000\u0080"+ - "\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001\u0000\u0000\u0000\u0082"+ - "\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0018\u0000\u0000\u0084\u0086"+ - "\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000\u0000\u0000\u0086\u0089"+ - "\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000\u0000\u0000\u0087\u0088"+ - "\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000\u0000\u0000\u0089\u0087"+ - "\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000\u008b\u0091\u0003\""+ - "\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d\u0091\u0003r9\u0000"+ - "\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u00030\u0018\u0000\u0090"+ - "\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000\u0000\u0000\u0090"+ - "\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000\u0000\u0000\u0090"+ - "\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000\u0000\u0000\u0092"+ - "\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004\u0000\u0094\u00a3"+ - 
"\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3\u00034\u001a\u0000"+ - "\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000\u0099\u00a3\u0003"+ - "T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003Z-\u0000\u009c\u00a3"+ - "\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f\u0004\u0003\u0002"+ - "\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003\u0003\u0000\u00a1"+ - "\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000\u00a2\u0093\u0001"+ - "\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000\u00a2\u0095\u0001"+ - "\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000\u00a2\u0097\u0001"+ - "\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000\u00a2\u0099\u0001"+ - "\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000\u00a2\u009b\u0001"+ - "\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000\u00a2\u009d\u0001"+ - "\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000\u00a2\u00a0\u0001"+ - "\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+ - "\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6\t\u0001\u0000"+ - "\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8\u00a9\u0005"+ - "+\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003\u0010\b\u0000"+ - "\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010\b\u0000\u00ad"+ - "\u00af\u0005+\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000\u0000\u00ae\u00af"+ - "\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1"+ - "\u0005&\u0000\u0000\u00b1\u00b2\u0005*\u0000\u0000\u00b2\u00b7\u0003\u0010"+ - "\b\u0000\u00b3\u00b4\u0005!\u0000\u0000\u00b4\u00b6\u0003\u0010\b\u0000"+ - "\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000"+ - "\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000"+ - "\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ - "\u00ba\u00bb\u00051\u0000\u0000\u00bb\u00c6\u0001\u0000\u0000\u0000\u00bc"+ - 
"\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005\'\u0000\u0000\u00be\u00c0"+ - "\u0005+\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001"+ - "\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1\u00c2\u0005"+ - ",\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000\u00c3\u00c4\u0004\u0005"+ - "\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000\u00c5\u00a7\u0001\u0000"+ - "\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000\u00c5\u00ab\u0001\u0000"+ - "\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000\u00c5\u00bc\u0001\u0000"+ - "\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00cf\u0001\u0000"+ - "\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8\u00c9\u0005\u001d\u0000"+ - "\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb\n\u0004\u0000\u0000"+ - "\u00cb\u00cc\u0005.\u0000\u0000\u00cc\u00ce\u0003\n\u0005\u0005\u00cd"+ - "\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000\u0000\u0000\u00ce"+ - "\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00cf"+ - "\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1"+ - "\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010\b\u0000\u00d3\u00d5"+ - "\u0005+\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000\u0000\u00d4\u00d5\u0001"+ - "\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7\u0005"+ - ")\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8\u00e1\u0001\u0000\u0000"+ - "\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc\u0005+\u0000\u0000"+ - "\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000"+ - "\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u00050\u0000\u0000\u00de"+ - "\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d2\u0001"+ - "\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000\u0000\u00e1\r\u0001\u0000"+ - "\u0000\u0000\u00e2\u00e3\u0003\u0010\b\u0000\u00e3\u00e4\u0005?\u0000"+ - "\u0000\u00e4\u00e5\u0003j5\u0000\u00e5\u000f\u0001\u0000\u0000\u0000\u00e6"+ - 
"\u00ec\u0003\u0012\t\u0000\u00e7\u00e8\u0003\u0012\t\u0000\u00e8\u00e9"+ - "\u0003l6\u0000\u00e9\u00ea\u0003\u0012\t\u0000\u00ea\u00ec\u0001\u0000"+ - "\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000\u0000\u00eb\u00e7\u0001\u0000"+ - "\u0000\u0000\u00ec\u0011\u0001\u0000\u0000\u0000\u00ed\u00ee\u0006\t\uffff"+ - "\uffff\u0000\u00ee\u00f2\u0003\u0014\n\u0000\u00ef\u00f0\u0007\u0000\u0000"+ - "\u0000\u00f0\u00f2\u0003\u0012\t\u0003\u00f1\u00ed\u0001\u0000\u0000\u0000"+ - "\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00fb\u0001\u0000\u0000\u0000"+ - "\u00f3\u00f4\n\u0002\u0000\u0000\u00f4\u00f5\u0007\u0001\u0000\u0000\u00f5"+ - "\u00fa\u0003\u0012\t\u0003\u00f6\u00f7\n\u0001\u0000\u0000\u00f7\u00f8"+ - "\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003\u0012\t\u0002\u00f9\u00f3\u0001"+ - "\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000\u0000\u0000\u00fa\u00fd\u0001"+ - "\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fb\u00fc\u0001"+ - "\u0000\u0000\u0000\u00fc\u0013\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001"+ - "\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff\uffff\u0000\u00ff\u0107\u0003"+ - "D\"\u0000\u0100\u0107\u0003:\u001d\u0000\u0101\u0107\u0003\u0016\u000b"+ - "\u0000\u0102\u0103\u0005*\u0000\u0000\u0103\u0104\u0003\n\u0005\u0000"+ - "\u0104\u0105\u00051\u0000\u0000\u0105\u0107\u0001\u0000\u0000\u0000\u0106"+ - "\u00fe\u0001\u0000\u0000\u0000\u0106\u0100\u0001\u0000\u0000\u0000\u0106"+ - "\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000\u0107"+ - "\u010d\u0001\u0000\u0000\u0000\u0108\u0109\n\u0001\u0000\u0000\u0109\u010a"+ - "\u0005 \u0000\u0000\u010a\u010c\u0003\u001a\r\u0000\u010b\u0108\u0001"+ - "\u0000\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001"+ - "\u0000\u0000\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u0015\u0001"+ - "\u0000\u0000\u0000\u010f\u010d\u0001\u0000\u0000\u0000\u0110\u0111\u0003"+ - "\u0018\f\u0000\u0111\u011b\u0005*\u0000\u0000\u0112\u011c\u0005<\u0000"+ - 
"\u0000\u0113\u0118\u0003\n\u0005\u0000\u0114\u0115\u0005!\u0000\u0000"+ - "\u0115\u0117\u0003\n\u0005\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0117"+ - "\u011a\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000\u0000\u0000\u0118"+ - "\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000\u0000\u011a"+ - "\u0118\u0001\u0000\u0000\u0000\u011b\u0112\u0001\u0000\u0000\u0000\u011b"+ - "\u0113\u0001\u0000\u0000\u0000\u011b\u011c\u0001\u0000\u0000\u0000\u011c"+ - "\u011d\u0001\u0000\u0000\u0000\u011d\u011e\u00051\u0000\u0000\u011e\u0017"+ - "\u0001\u0000\u0000\u0000\u011f\u0122\u0005?\u0000\u0000\u0120\u0122\u0003"+ - "H$\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0120\u0001\u0000\u0000"+ - "\u0000\u0122\u0019\u0001\u0000\u0000\u0000\u0123\u0124\u0003@ \u0000\u0124"+ - "\u001b\u0001\u0000\u0000\u0000\u0125\u0126\u0005\f\u0000\u0000\u0126\u0127"+ - "\u0003\u001e\u000f\u0000\u0127\u001d\u0001\u0000\u0000\u0000\u0128\u012d"+ - "\u0003 \u0010\u0000\u0129\u012a\u0005!\u0000\u0000\u012a\u012c\u0003 "+ - "\u0010\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012c\u012f\u0001\u0000"+ - "\u0000\u0000\u012d\u012b\u0001\u0000\u0000\u0000\u012d\u012e\u0001\u0000"+ - "\u0000\u0000\u012e\u001f\u0001\u0000\u0000\u0000\u012f\u012d\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0003:\u001d\u0000\u0131\u0132\u0005\u001f\u0000"+ - "\u0000\u0132\u0134\u0001\u0000\u0000\u0000\u0133\u0130\u0001\u0000\u0000"+ - "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000"+ - "\u0000\u0135\u0136\u0003\n\u0005\u0000\u0136!\u0001\u0000\u0000\u0000"+ - "\u0137\u0138\u0005\u0006\u0000\u0000\u0138\u013d\u0003$\u0012\u0000\u0139"+ - "\u013a\u0005!\u0000\u0000\u013a\u013c\u0003$\u0012\u0000\u013b\u0139\u0001"+ - "\u0000\u0000\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001"+ - "\u0000\u0000\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e\u0141\u0001"+ - "\u0000\u0000\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0142\u0003"+ - 
"*\u0015\u0000\u0141\u0140\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ - "\u0000\u0000\u0142#\u0001\u0000\u0000\u0000\u0143\u0144\u0003&\u0013\u0000"+ - "\u0144\u0145\u0005h\u0000\u0000\u0145\u0147\u0001\u0000\u0000\u0000\u0146"+ - "\u0143\u0001\u0000\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147"+ - "\u0148\u0001\u0000\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149%\u0001"+ - "\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000\u014b\'\u0001\u0000"+ - "\u0000\u0000\u014c\u014d\u0007\u0002\u0000\u0000\u014d)\u0001\u0000\u0000"+ - "\u0000\u014e\u0151\u0003,\u0016\u0000\u014f\u0151\u0003.\u0017\u0000\u0150"+ - "\u014e\u0001\u0000\u0000\u0000\u0150\u014f\u0001\u0000\u0000\u0000\u0151"+ - "+\u0001\u0000\u0000\u0000\u0152\u0153\u0005K\u0000\u0000\u0153\u0158\u0005"+ - "L\u0000\u0000\u0154\u0155\u0005!\u0000\u0000\u0155\u0157\u0005L\u0000"+ - "\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0157\u015a\u0001\u0000\u0000"+ - "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0159\u0001\u0000\u0000"+ - "\u0000\u0159-\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000"+ - "\u015b\u015c\u0005A\u0000\u0000\u015c\u015d\u0003,\u0016\u0000\u015d\u015e"+ - "\u0005B\u0000\u0000\u015e/\u0001\u0000\u0000\u0000\u015f\u0160\u0005\u0013"+ - "\u0000\u0000\u0160\u0165\u0003$\u0012\u0000\u0161\u0162\u0005!\u0000\u0000"+ - "\u0162\u0164\u0003$\u0012\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ - "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0165"+ - "\u0166\u0001\u0000\u0000\u0000\u0166\u0169\u0001\u0000\u0000\u0000\u0167"+ - "\u0165\u0001\u0000\u0000\u0000\u0168\u016a\u00036\u001b\u0000\u0169\u0168"+ - "\u0001\u0000\u0000\u0000\u0169\u016a\u0001\u0000\u0000\u0000\u016a\u016d"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u001c\u0000\u0000\u016c\u016e"+ - "\u0003\u001e\u000f\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e"+ - "\u0001\u0000\u0000\u0000\u016e1\u0001\u0000\u0000\u0000\u016f\u0170\u0005"+ - 
"\u0004\u0000\u0000\u0170\u0171\u0003\u001e\u000f\u0000\u01713\u0001\u0000"+ - "\u0000\u0000\u0172\u0174\u0005\u000f\u0000\u0000\u0173\u0175\u00036\u001b"+ - "\u0000\u0174\u0173\u0001\u0000\u0000\u0000\u0174\u0175\u0001\u0000\u0000"+ - "\u0000\u0175\u0178\u0001\u0000\u0000\u0000\u0176\u0177\u0005\u001c\u0000"+ - "\u0000\u0177\u0179\u0003\u001e\u000f\u0000\u0178\u0176\u0001\u0000\u0000"+ - "\u0000\u0178\u0179\u0001\u0000\u0000\u0000\u01795\u0001\u0000\u0000\u0000"+ - "\u017a\u017f\u00038\u001c\u0000\u017b\u017c\u0005!\u0000\u0000\u017c\u017e"+ - "\u00038\u001c\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001"+ - "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001"+ - "\u0000\u0000\u0000\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000"+ - "\u0000\u0000\u0182\u0185\u0003 \u0010\u0000\u0183\u0184\u0005\u0010\u0000"+ - "\u0000\u0184\u0186\u0003\n\u0005\u0000\u0185\u0183\u0001\u0000\u0000\u0000"+ - "\u0185\u0186\u0001\u0000\u0000\u0000\u01869\u0001\u0000\u0000\u0000\u0187"+ - "\u018c\u0003H$\u0000\u0188\u0189\u0005#\u0000\u0000\u0189\u018b\u0003"+ - "H$\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018b\u018e\u0001\u0000\u0000"+ - "\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000"+ - "\u0000\u018d;\u0001\u0000\u0000\u0000\u018e\u018c\u0001\u0000\u0000\u0000"+ - "\u018f\u0194\u0003B!\u0000\u0190\u0191\u0005#\u0000\u0000\u0191\u0193"+ - "\u0003B!\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001\u0000"+ - "\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001\u0000"+ - "\u0000\u0000\u0195=\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000\u0000"+ - "\u0000\u0197\u019c\u0003<\u001e\u0000\u0198\u0199\u0005!\u0000\u0000\u0199"+ - "\u019b\u0003<\u001e\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ - "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ - "\u0001\u0000\u0000\u0000\u019d?\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ - 
"\u0000\u0000\u0000\u019f\u01a0\u0007\u0003\u0000\u0000\u01a0A\u0001\u0000"+ - "\u0000\u0000\u01a1\u01a5\u0005P\u0000\u0000\u01a2\u01a3\u0004!\n\u0000"+ - "\u01a3\u01a5\u0003F#\u0000\u01a4\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a5C\u0001\u0000\u0000\u0000\u01a6\u01d1\u0005"+ - ",\u0000\u0000\u01a7\u01a8\u0003h4\u0000\u01a8\u01a9\u0005C\u0000\u0000"+ - "\u01a9\u01d1\u0001\u0000\u0000\u0000\u01aa\u01d1\u0003f3\u0000\u01ab\u01d1"+ - "\u0003h4\u0000\u01ac\u01d1\u0003b1\u0000\u01ad\u01d1\u0003F#\u0000\u01ae"+ - "\u01d1\u0003j5\u0000\u01af\u01b0\u0005A\u0000\u0000\u01b0\u01b5\u0003"+ - "d2\u0000\u01b1\u01b2\u0005!\u0000\u0000\u01b2\u01b4\u0003d2\u0000\u01b3"+ - "\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000\u0000\u0000\u01b5"+ - "\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6"+ - "\u01b8\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000\u0000\u0000\u01b8"+ - "\u01b9\u0005B\u0000\u0000\u01b9\u01d1\u0001\u0000\u0000\u0000\u01ba\u01bb"+ - "\u0005A\u0000\u0000\u01bb\u01c0\u0003b1\u0000\u01bc\u01bd\u0005!\u0000"+ - "\u0000\u01bd\u01bf\u0003b1\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01bf"+ - "\u01c2\u0001\u0000\u0000\u0000\u01c0\u01be\u0001\u0000\u0000\u0000\u01c0"+ - "\u01c1\u0001\u0000\u0000\u0000\u01c1\u01c3\u0001\u0000\u0000\u0000\u01c2"+ - "\u01c0\u0001\u0000\u0000\u0000\u01c3\u01c4\u0005B\u0000\u0000\u01c4\u01d1"+ - "\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005A\u0000\u0000\u01c6\u01cb\u0003"+ - "j5\u0000\u01c7\u01c8\u0005!\u0000\u0000\u01c8\u01ca\u0003j5\u0000\u01c9"+ - "\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd\u0001\u0000\u0000\u0000\u01cb"+ - "\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc"+ - "\u01ce\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce"+ - "\u01cf\u0005B\u0000\u0000\u01cf\u01d1\u0001\u0000\u0000\u0000\u01d0\u01a6"+ - "\u0001\u0000\u0000\u0000\u01d0\u01a7\u0001\u0000\u0000\u0000\u01d0\u01aa"+ - 
"\u0001\u0000\u0000\u0000\u01d0\u01ab\u0001\u0000\u0000\u0000\u01d0\u01ac"+ - "\u0001\u0000\u0000\u0000\u01d0\u01ad\u0001\u0000\u0000\u0000\u01d0\u01ae"+ - "\u0001\u0000\u0000\u0000\u01d0\u01af\u0001\u0000\u0000\u0000\u01d0\u01ba"+ - "\u0001\u0000\u0000\u0000\u01d0\u01c5\u0001\u0000\u0000\u0000\u01d1E\u0001"+ - "\u0000\u0000\u0000\u01d2\u01d5\u0005/\u0000\u0000\u01d3\u01d5\u0005@\u0000"+ - "\u0000\u01d4\u01d2\u0001\u0000\u0000\u0000\u01d4\u01d3\u0001\u0000\u0000"+ - "\u0000\u01d5G\u0001\u0000\u0000\u0000\u01d6\u01da\u0003@ \u0000\u01d7"+ - "\u01d8\u0004$\u000b\u0000\u01d8\u01da\u0003F#\u0000\u01d9\u01d6\u0001"+ - "\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01daI\u0001\u0000"+ - "\u0000\u0000\u01db\u01dc\u0005\t\u0000\u0000\u01dc\u01dd\u0005\u001a\u0000"+ - "\u0000\u01ddK\u0001\u0000\u0000\u0000\u01de\u01df\u0005\u000e\u0000\u0000"+ - "\u01df\u01e4\u0003N\'\u0000\u01e0\u01e1\u0005!\u0000\u0000\u01e1\u01e3"+ - "\u0003N\'\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e3\u01e6\u0001"+ - "\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001"+ - "\u0000\u0000\u0000\u01e5M\u0001\u0000\u0000\u0000\u01e6\u01e4\u0001\u0000"+ - "\u0000\u0000\u01e7\u01e9\u0003\n\u0005\u0000\u01e8\u01ea\u0007\u0004\u0000"+ - "\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000"+ - "\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005-\u0000\u0000"+ - "\u01ec\u01ee\u0007\u0005\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000"+ - "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01eeO\u0001\u0000\u0000\u0000\u01ef"+ - "\u01f0\u0005\b\u0000\u0000\u01f0\u01f1\u0003>\u001f\u0000\u01f1Q\u0001"+ - "\u0000\u0000\u0000\u01f2\u01f3\u0005\u0002\u0000\u0000\u01f3\u01f4\u0003"+ - ">\u001f\u0000\u01f4S\u0001\u0000\u0000\u0000\u01f5\u01f6\u0005\u000b\u0000"+ - "\u0000\u01f6\u01fb\u0003V+\u0000\u01f7\u01f8\u0005!\u0000\u0000\u01f8"+ - "\u01fa\u0003V+\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01fa\u01fd\u0001"+ - 
"\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000\u01fb\u01fc\u0001"+ - "\u0000\u0000\u0000\u01fcU\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000"+ - "\u0000\u0000\u01fe\u01ff\u0003<\u001e\u0000\u01ff\u0200\u0005T\u0000\u0000"+ - "\u0200\u0201\u0003<\u001e\u0000\u0201W\u0001\u0000\u0000\u0000\u0202\u0203"+ - "\u0005\u0001\u0000\u0000\u0203\u0204\u0003\u0014\n\u0000\u0204\u0206\u0003"+ - "j5\u0000\u0205\u0207\u0003^/\u0000\u0206\u0205\u0001\u0000\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207Y\u0001\u0000\u0000\u0000\u0208"+ - "\u0209\u0005\u0007\u0000\u0000\u0209\u020a\u0003\u0014\n\u0000\u020a\u020b"+ - "\u0003j5\u0000\u020b[\u0001\u0000\u0000\u0000\u020c\u020d\u0005\n\u0000"+ - "\u0000\u020d\u020e\u0003:\u001d\u0000\u020e]\u0001\u0000\u0000\u0000\u020f"+ - "\u0214\u0003`0\u0000\u0210\u0211\u0005!\u0000\u0000\u0211\u0213\u0003"+ - "`0\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000\u0000"+ - "\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000\u0000"+ - "\u0000\u0215_\u0001\u0000\u0000\u0000\u0216\u0214\u0001\u0000\u0000\u0000"+ - "\u0217\u0218\u0003@ \u0000\u0218\u0219\u0005\u001f\u0000\u0000\u0219\u021a"+ - "\u0003D\"\u0000\u021aa\u0001\u0000\u0000\u0000\u021b\u021c\u0007\u0006"+ - "\u0000\u0000\u021cc\u0001\u0000\u0000\u0000\u021d\u0220\u0003f3\u0000"+ - "\u021e\u0220\u0003h4\u0000\u021f\u021d\u0001\u0000\u0000\u0000\u021f\u021e"+ - "\u0001\u0000\u0000\u0000\u0220e\u0001\u0000\u0000\u0000\u0221\u0223\u0007"+ - "\u0000\u0000\u0000\u0222\u0221\u0001\u0000\u0000\u0000\u0222\u0223\u0001"+ - "\u0000\u0000\u0000\u0223\u0224\u0001\u0000\u0000\u0000\u0224\u0225\u0005"+ - "\u001b\u0000\u0000\u0225g\u0001\u0000\u0000\u0000\u0226\u0228\u0007\u0000"+ - "\u0000\u0000\u0227\u0226\u0001\u0000\u0000\u0000\u0227\u0228\u0001\u0000"+ - "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a"+ - "\u0000\u0000\u022ai\u0001\u0000\u0000\u0000\u022b\u022c\u0005\u0019\u0000"+ - 
"\u0000\u022ck\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0007\u0000\u0000"+ - "\u022em\u0001\u0000\u0000\u0000\u022f\u0230\u0005\u0005\u0000\u0000\u0230"+ - "\u0231\u0003p8\u0000\u0231o\u0001\u0000\u0000\u0000\u0232\u0233\u0005"+ - "A\u0000\u0000\u0233\u0234\u0003\u0002\u0001\u0000\u0234\u0235\u0005B\u0000"+ - "\u0000\u0235q\u0001\u0000\u0000\u0000\u0236\u0237\u0005\r\u0000\u0000"+ - "\u0237\u0238\u0005d\u0000\u0000\u0238s\u0001\u0000\u0000\u0000\u0239\u023a"+ - "\u0005\u0003\u0000\u0000\u023a\u023d\u0005Z\u0000\u0000\u023b\u023c\u0005"+ - "X\u0000\u0000\u023c\u023e\u0003<\u001e\u0000\u023d\u023b\u0001\u0000\u0000"+ - "\u0000\u023d\u023e\u0001\u0000\u0000\u0000\u023e\u0248\u0001\u0000\u0000"+ - "\u0000\u023f\u0240\u0005Y\u0000\u0000\u0240\u0245\u0003v;\u0000\u0241"+ - "\u0242\u0005!\u0000\u0000\u0242\u0244\u0003v;\u0000\u0243\u0241\u0001"+ - "\u0000\u0000\u0000\u0244\u0247\u0001\u0000\u0000\u0000\u0245\u0243\u0001"+ - "\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0249\u0001"+ - "\u0000\u0000\u0000\u0247\u0245\u0001\u0000\u0000\u0000\u0248\u023f\u0001"+ - "\u0000\u0000\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249u\u0001\u0000"+ - "\u0000\u0000\u024a\u024b\u0003<\u001e\u0000\u024b\u024c\u0005\u001f\u0000"+ - "\u0000\u024c\u024e\u0001\u0000\u0000\u0000\u024d\u024a\u0001\u0000\u0000"+ - "\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u024f\u0001\u0000\u0000"+ - "\u0000\u024f\u0250\u0003<\u001e\u0000\u0250w\u0001\u0000\u0000\u0000\u0251"+ - "\u0252\u0005\u0012\u0000\u0000\u0252\u0253\u0003$\u0012\u0000\u0253\u0254"+ - "\u0005X\u0000\u0000\u0254\u0255\u0003>\u001f\u0000\u0255y\u0001\u0000"+ - "\u0000\u0000\u0256\u0257\u0005\u0011\u0000\u0000\u0257\u025a\u00036\u001b"+ - "\u0000\u0258\u0259\u0005\u001c\u0000\u0000\u0259\u025b\u0003\u001e\u000f"+ - "\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000"+ - "\u0000\u025b{\u0001\u0000\u0000\u0000;\u0087\u0090\u00a2\u00ae\u00b7\u00bf"+ - 
"\u00c5\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d"+ - "\u0118\u011b\u0121\u012d\u0133\u013d\u0141\u0146\u0150\u0158\u0165\u0169"+ - "\u016d\u0174\u0178\u017f\u0185\u018c\u0194\u019c\u01a4\u01b5\u01c0\u01cb"+ - "\u01d0\u01d4\u01d9\u01e4\u01e9\u01ed\u01fb\u0206\u0214\u021f\u0222\u0227"+ - "\u023d\u0245\u0248\u024d\u025a"; + "\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0005\u000f\u012a\b\u000f\n\u000f\f\u000f\u012d"+ + "\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132\b\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0005\u0011\u013a\b\u0011\n\u0011\f\u0011\u013d\t\u0011\u0001\u0011\u0003"+ + "\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0145"+ + "\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ + "\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u014f\b\u0015\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0155\b\u0016\n\u0016\f\u0016"+ + "\u0158\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0162\b\u0018\n\u0018"+ + "\f\u0018\u0165\t\u0018\u0001\u0018\u0003\u0018\u0168\b\u0018\u0001\u0018"+ + "\u0001\u0018\u0003\u0018\u016c\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u001a\u0001\u001a\u0003\u001a\u0173\b\u001a\u0001\u001a\u0001\u001a"+ + "\u0003\u001a\u0177\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ + "\u017c\b\u001b\n\u001b\f\u001b\u017f\t\u001b\u0001\u001c\u0001\u001c\u0001"+ + "\u001c\u0003\u001c\u0184\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005"+ + "\u001d\u0189\b\u001d\n\u001d\f\u001d\u018c\t\u001d\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0005\u001e\u0191\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0199\b\u001f\n\u001f\f\u001f"+ + "\u019c\t\u001f\u0001 \u0001 
\u0001!\u0001!\u0001!\u0003!\u01a3\b!\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b2\b\"\n\"\f\"\u01b5\t\"\u0001\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bd\b\"\n\"\f\"\u01c0"+ + "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c8\b\""+ + "\n\"\f\"\u01cb\t\"\u0001\"\u0001\"\u0003\"\u01cf\b\"\u0001#\u0001#\u0003"+ + "#\u01d3\b#\u0001$\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001%\u0001"+ + "&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001\'"+ + "\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ + "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ + "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ + ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ + "/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ + "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ + "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ + "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ + ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ + ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ + "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ + "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ + ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ + "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ + "\u0000;<\u0001\u0000=?\u0002\u0000\u001a\u001aLL\u0001\u0000CD\u0002\u0000"+ + "\u001f\u001f##\u0002\u0000&&))\u0002\u0000%%33\u0002\u0000446:\u0274\u0000"+ + "|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090"+ + "\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001"+ + 
"\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000"+ + "\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000"+ + "\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000"+ + "\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u011f\u0001\u0000"+ + "\u0000\u0000\u001a\u0121\u0001\u0000\u0000\u0000\u001c\u0123\u0001\u0000"+ + "\u0000\u0000\u001e\u0126\u0001\u0000\u0000\u0000 \u0131\u0001\u0000\u0000"+ + "\u0000\"\u0135\u0001\u0000\u0000\u0000$\u0144\u0001\u0000\u0000\u0000"+ + "&\u0148\u0001\u0000\u0000\u0000(\u014a\u0001\u0000\u0000\u0000*\u014e"+ + "\u0001\u0000\u0000\u0000,\u0150\u0001\u0000\u0000\u0000.\u0159\u0001\u0000"+ + "\u0000\u00000\u015d\u0001\u0000\u0000\u00002\u016d\u0001\u0000\u0000\u0000"+ + "4\u0170\u0001\u0000\u0000\u00006\u0178\u0001\u0000\u0000\u00008\u0180"+ + "\u0001\u0000\u0000\u0000:\u0185\u0001\u0000\u0000\u0000<\u018d\u0001\u0000"+ + "\u0000\u0000>\u0195\u0001\u0000\u0000\u0000@\u019d\u0001\u0000\u0000\u0000"+ + "B\u01a2\u0001\u0000\u0000\u0000D\u01ce\u0001\u0000\u0000\u0000F\u01d2"+ + "\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001\u0000"+ + "\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000\u0000"+ + "P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T\u01f3"+ + "\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001\u0000"+ + "\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000\u0000"+ + "\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000b"+ + "\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220\u0001"+ + "\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000\u0000"+ + "\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000p"+ + "\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237\u0001"+ + "\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000\u0000"+ + 
"\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005"+ + "\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001"+ + "\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001"+ + "\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0019"+ + "\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000"+ + "\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000"+ + "\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000"+ + "\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000"+ + "\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d"+ + "\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u0003"+ + "0\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000"+ + "\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000"+ + "\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000"+ + "\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004"+ + "\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3"+ + "\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000"+ + "\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003"+ + "Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f"+ + "\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003"+ + "\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000"+ + "\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000"+ + "\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000"+ + "\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000"+ + "\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000"+ + "\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000"+ + 
"\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000"+ + "\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000"+ + "\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6"+ + "\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8"+ + "\u00a9\u0005,\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003"+ + "\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010"+ + "\b\u0000\u00ad\u00af\u0005,\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000"+ + "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000"+ + "\u0000\u00b0\u00b1\u0005\'\u0000\u0000\u00b1\u00b2\u0005+\u0000\u0000"+ + "\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005\"\u0000\u0000\u00b4"+ + "\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9"+ + "\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8"+ + "\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7"+ + "\u0001\u0000\u0000\u0000\u00ba\u00bb\u00052\u0000\u0000\u00bb\u00c6\u0001"+ + "\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005("+ + "\u0000\u0000\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000"+ + "\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000"+ + "\u0000\u00c1\u00c2\u0005-\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000"+ + "\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000"+ + "\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000"+ + "\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000"+ + "\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8"+ + "\u00c9\u0005\u001e\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb"+ + "\n\u0004\u0000\u0000\u00cb\u00cc\u0005/\u0000\u0000\u00cc\u00ce\u0003"+ + 
"\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000"+ + "\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000"+ + "\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010"+ + "\b\u0000\u00d3\u00d5\u0005,\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000"+ + "\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000"+ + "\u0000\u00d6\u00d7\u0005*\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8"+ + "\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc"+ + "\u0005,\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u0005"+ + "1\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000"+ + "\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000"+ + "\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003:\u001d\u0000"+ + "\u00e3\u00e4\u0005\u0018\u0000\u0000\u00e4\u00e5\u0003D\"\u0000\u00e5"+ + "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ + "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ + "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ + "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ + "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ + "\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ + "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ + "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ + "\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ + "\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ + "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ + 
"\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ + "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ + "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ + "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ + "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005+\u0000\u0000\u0103"+ + "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00052\u0000\u0000\u0105\u0107"+ + "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ + "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ + "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ + "\n\u0001\u0000\u0000\u0109\u010a\u0005!\u0000\u0000\u010a\u010c\u0003"+ + "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ + "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ + "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ + "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005+\u0000"+ + "\u0000\u0112\u011c\u0005=\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ + "\u0114\u0115\u0005\"\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ + "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ + "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ + "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ + "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ + "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ + "\u011e\u00052\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0120"+ + "\u0003H$\u0000\u0120\u0019\u0001\u0000\u0000\u0000\u0121\u0122\u0003@"+ + " \u0000\u0122\u001b\u0001\u0000\u0000\u0000\u0123\u0124\u0005\f\u0000"+ + "\u0000\u0124\u0125\u0003\u001e\u000f\u0000\u0125\u001d\u0001\u0000\u0000"+ + "\u0000\u0126\u012b\u0003 
\u0010\u0000\u0127\u0128\u0005\"\u0000\u0000"+ + "\u0128\u012a\u0003 \u0010\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u012a"+ + "\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012b"+ + "\u012c\u0001\u0000\u0000\u0000\u012c\u001f\u0001\u0000\u0000\u0000\u012d"+ + "\u012b\u0001\u0000\u0000\u0000\u012e\u012f\u0003:\u001d\u0000\u012f\u0130"+ + "\u0005 \u0000\u0000\u0130\u0132\u0001\u0000\u0000\u0000\u0131\u012e\u0001"+ + "\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132\u0133\u0001"+ + "\u0000\u0000\u0000\u0133\u0134\u0003\n\u0005\u0000\u0134!\u0001\u0000"+ + "\u0000\u0000\u0135\u0136\u0005\u0006\u0000\u0000\u0136\u013b\u0003$\u0012"+ + "\u0000\u0137\u0138\u0005\"\u0000\u0000\u0138\u013a\u0003$\u0012\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ + "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ + "\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ + "\u013e\u0140\u0003*\u0015\u0000\u013f\u013e\u0001\u0000\u0000\u0000\u013f"+ + "\u0140\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141\u0142"+ + "\u0003&\u0013\u0000\u0142\u0143\u0005\u0018\u0000\u0000\u0143\u0145\u0001"+ + "\u0000\u0000\u0000\u0144\u0141\u0001\u0000\u0000\u0000\u0144\u0145\u0001"+ + "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0147\u0003"+ + "(\u0014\u0000\u0147%\u0001\u0000\u0000\u0000\u0148\u0149\u0005L\u0000"+ + "\u0000\u0149\'\u0001\u0000\u0000\u0000\u014a\u014b\u0007\u0002\u0000\u0000"+ + "\u014b)\u0001\u0000\u0000\u0000\u014c\u014f\u0003,\u0016\u0000\u014d\u014f"+ + "\u0003.\u0017\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014d\u0001"+ + "\u0000\u0000\u0000\u014f+\u0001\u0000\u0000\u0000\u0150\u0151\u0005K\u0000"+ + "\u0000\u0151\u0156\u0005L\u0000\u0000\u0152\u0153\u0005\"\u0000\u0000"+ + "\u0153\u0155\u0005L\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ + "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ 
+ "\u0157\u0001\u0000\u0000\u0000\u0157-\u0001\u0000\u0000\u0000\u0158\u0156"+ + "\u0001\u0000\u0000\u0000\u0159\u015a\u0005A\u0000\u0000\u015a\u015b\u0003"+ + ",\u0016\u0000\u015b\u015c\u0005B\u0000\u0000\u015c/\u0001\u0000\u0000"+ + "\u0000\u015d\u015e\u0005\u0013\u0000\u0000\u015e\u0163\u0003$\u0012\u0000"+ + "\u015f\u0160\u0005\"\u0000\u0000\u0160\u0162\u0003$\u0012\u0000\u0161"+ + "\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163"+ + "\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164"+ + "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166"+ + "\u0168\u00036\u001b\u0000\u0167\u0166\u0001\u0000\u0000\u0000\u0167\u0168"+ + "\u0001\u0000\u0000\u0000\u0168\u016b\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0005\u001d\u0000\u0000\u016a\u016c\u0003\u001e\u000f\u0000\u016b\u0169"+ + "\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000\u016c1\u0001"+ + "\u0000\u0000\u0000\u016d\u016e\u0005\u0004\u0000\u0000\u016e\u016f\u0003"+ + "\u001e\u000f\u0000\u016f3\u0001\u0000\u0000\u0000\u0170\u0172\u0005\u000f"+ + "\u0000\u0000\u0171\u0173\u00036\u001b\u0000\u0172\u0171\u0001\u0000\u0000"+ + "\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0005\u001d\u0000\u0000\u0175\u0177\u0003\u001e\u000f"+ + "\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000\u0000"+ + "\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u017d\u00038\u001c\u0000\u0179"+ + "\u017a\u0005\"\u0000\u0000\u017a\u017c\u00038\u001c\u0000\u017b\u0179"+ + "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017b"+ + "\u0001\u0000\u0000\u0000\u017d\u017e\u0001\u0000\u0000\u0000\u017e7\u0001"+ + "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180\u0183\u0003"+ + " \u0010\u0000\u0181\u0182\u0005\u0010\u0000\u0000\u0182\u0184\u0003\n"+ + "\u0005\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ + 
"\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u018a\u0003H$\u0000"+ + "\u0186\u0187\u0005$\u0000\u0000\u0187\u0189\u0003H$\u0000\u0188\u0186"+ + "\u0001\u0000\u0000\u0000\u0189\u018c\u0001\u0000\u0000\u0000\u018a\u0188"+ + "\u0001\u0000\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001"+ + "\u0000\u0000\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018d\u0192\u0003"+ + "B!\u0000\u018e\u018f\u0005$\u0000\u0000\u018f\u0191\u0003B!\u0000\u0190"+ + "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ + "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + "=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u019a"+ + "\u0003<\u001e\u0000\u0196\u0197\u0005\"\u0000\u0000\u0197\u0199\u0003"+ + "<\u001e\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000"+ + "\u0000\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000"+ + "\u0000\u0000\u019b?\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ + "\u0000\u019d\u019e\u0007\u0003\u0000\u0000\u019eA\u0001\u0000\u0000\u0000"+ + "\u019f\u01a3\u0005P\u0000\u0000\u01a0\u01a1\u0004!\n\u0000\u01a1\u01a3"+ + "\u0003F#\u0000\u01a2\u019f\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000"+ + "\u0000\u0000\u01a3C\u0001\u0000\u0000\u0000\u01a4\u01cf\u0005-\u0000\u0000"+ + "\u01a5\u01a6\u0003h4\u0000\u01a6\u01a7\u0005C\u0000\u0000\u01a7\u01cf"+ + "\u0001\u0000\u0000\u0000\u01a8\u01cf\u0003f3\u0000\u01a9\u01cf\u0003h"+ + "4\u0000\u01aa\u01cf\u0003b1\u0000\u01ab\u01cf\u0003F#\u0000\u01ac\u01cf"+ + "\u0003j5\u0000\u01ad\u01ae\u0005A\u0000\u0000\u01ae\u01b3\u0003d2\u0000"+ + "\u01af\u01b0\u0005\"\u0000\u0000\u01b0\u01b2\u0003d2\u0000\u01b1\u01af"+ + "\u0001\u0000\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1"+ + "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4\u01b6"+ + "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7"+ + 
"\u0005B\u0000\u0000\u01b7\u01cf\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005"+ + "A\u0000\u0000\u01b9\u01be\u0003b1\u0000\u01ba\u01bb\u0005\"\u0000\u0000"+ + "\u01bb\u01bd\u0003b1\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0"+ + "\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf"+ + "\u0001\u0000\u0000\u0000\u01bf\u01c1\u0001\u0000\u0000\u0000\u01c0\u01be"+ + "\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005B\u0000\u0000\u01c2\u01cf\u0001"+ + "\u0000\u0000\u0000\u01c3\u01c4\u0005A\u0000\u0000\u01c4\u01c9\u0003j5"+ + "\u0000\u01c5\u01c6\u0005\"\u0000\u0000\u01c6\u01c8\u0003j5\u0000\u01c7"+ + "\u01c5\u0001\u0000\u0000\u0000\u01c8\u01cb\u0001\u0000\u0000\u0000\u01c9"+ + "\u01c7\u0001\u0000\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca"+ + "\u01cc\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cc"+ + "\u01cd\u0005B\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01a4"+ + "\u0001\u0000\u0000\u0000\u01ce\u01a5\u0001\u0000\u0000\u0000\u01ce\u01a8"+ + "\u0001\u0000\u0000\u0000\u01ce\u01a9\u0001\u0000\u0000\u0000\u01ce\u01aa"+ + "\u0001\u0000\u0000\u0000\u01ce\u01ab\u0001\u0000\u0000\u0000\u01ce\u01ac"+ + "\u0001\u0000\u0000\u0000\u01ce\u01ad\u0001\u0000\u0000\u0000\u01ce\u01b8"+ + "\u0001\u0000\u0000\u0000\u01ce\u01c3\u0001\u0000\u0000\u0000\u01cfE\u0001"+ + "\u0000\u0000\u0000\u01d0\u01d3\u00050\u0000\u0000\u01d1\u01d3\u0005@\u0000"+ + "\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000"+ + "\u0000\u01d3G\u0001\u0000\u0000\u0000\u01d4\u01d8\u0003@ \u0000\u01d5"+ + "\u01d6\u0004$\u000b\u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000"+ + "\u0000\u0000\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001b\u0000"+ + "\u0000\u01dbK\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000"+ + "\u01dd\u01e2\u0003N\'\u0000\u01de\u01df\u0005\"\u0000\u0000\u01df\u01e1"+ + 
"\u0003N\'\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001"+ + "\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001"+ + "\u0000\u0000\u0000\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000"+ + "\u0000\u0000\u01e5\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000"+ + "\u0000\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000"+ + "\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005.\u0000\u0000"+ + "\u01ea\u01ec\u0007\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ + "\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed"+ + "\u01ee\u0005\b\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003"+ + ">\u001f\u0000\u01f2S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000"+ + "\u0000\u01f4\u01f9\u0003V+\u0000\u01f5\u01f6\u0005\"\u0000\u0000\u01f6"+ + "\u01f8\u0003V+\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001"+ + "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001"+ + "\u0000\u0000\u0000\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ + "\u0000\u0000\u01fc\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000"+ + "\u01fe\u01ff\u0003<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201"+ + "\u0005\u0001\u0000\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003"+ + "j5\u0000\u0203\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000"+ + "\u0204\u0205\u0001\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206"+ + "\u0207\u0005\u0007\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209"+ + "\u0003j5\u0000\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000"+ + "\u0000\u020b\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d"+ + "\u0212\u0003`0\u0000\u020e\u020f\u0005\"\u0000\u0000\u020f\u0211\u0003"+ + "`0\u0000\u0210\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000"+ + 
"\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000"+ + "\u0000\u0213_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+ + "\u0215\u0216\u0003@ \u0000\u0216\u0217\u0005 \u0000\u0000\u0217\u0218"+ + "\u0003D\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006"+ + "\u0000\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000"+ + "\u021c\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c"+ + "\u0001\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007"+ + "\u0000\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001"+ + "\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005"+ + "\u001c\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000"+ + "\u0000\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000"+ + "\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001b"+ + "\u0000\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a\u0000"+ + "\u0000\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000"+ + "\u022cm\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e"+ + "\u022f\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005"+ + "A\u0000\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000"+ + "\u0000\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000"+ + "\u0235\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238"+ + "\u0005\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005"+ + "X\u0000\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000"+ + "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000"+ + "\u0000\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f"+ + "\u0240\u0005\"\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001"+ + "\u0000\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ + 
"\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001"+ + "\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001"+ + "\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000"+ + "\u0000\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005 \u0000\u0000"+ + "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ + "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ + "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ + "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ + "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ + "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ + "\u0256\u0257\u0005\u001d\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ + "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ + "\u0259{\u0001\u0000\u0000\u0000:\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ + "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ + "\u011b\u012b\u0131\u013b\u013f\u0144\u014e\u0156\u0163\u0167\u016b\u0172"+ + "\u0176\u017d\u0183\u018a\u0192\u019a\u01a2\u01b3\u01be\u01c9\u01ce\u01d2"+ + "\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b\u0243"+ + "\u0246\u024b\u0258"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 5903d725bf9c2..c428a2c6411a1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -15,7 +15,6 @@ import org.apache.lucene.util.automaton.Automaton; import 
org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.Build; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; @@ -27,7 +26,6 @@ import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; @@ -46,6 +44,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; @@ -614,9 +613,6 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte @Override public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { - if (ctx.MATCH() != null) { - return ctx.MATCH().getText(); - } return visitIdentifierOrParameter(ctx.identifierOrParameter()); } @@ -815,7 +811,7 @@ public List visitGrouping(EsqlBaseParser.FieldsContext ctx) { } // wrap when necessary - no alias and no underlying attribute if (ne == null) { - ne = new Alias(source(ctx), name, value); + ne = new Alias(source(field), name, value); } 
list.add(ne); } @@ -927,14 +923,6 @@ String unresolvedAttributeNameInParam(ParserRuleContext ctx, Expression param) { @Override public Expression visitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { - if (Build.current().isSnapshot() == false) { - throw new ParsingException(source(ctx), "MATCH operator currently requires a snapshot build"); - } - return new MatchQueryPredicate( - source(ctx), - expression(ctx.valueExpression()), - visitString(ctx.queryString).fold().toString(), - null - ); + return new Match(source(ctx), expression(ctx.fieldExp), expression(ctx.queryString)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 2c8604a7c4a80..12dc77e6e7c59 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -87,7 +87,6 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.Nots(), new ExpressionTranslators.Likes(), new ExpressionTranslators.StringQueries(), - new ExpressionTranslators.Matches(), new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 7fb998e82001e..c9c292769b570 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -74,8 +74,6 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static 
org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; -import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; @@ -366,13 +364,8 @@ public static DataType commonType(DataType left, DataType right) { } } if (isString(left) && isString(right)) { - if (left == SEMANTIC_TEXT || right == SEMANTIC_TEXT) { - return KEYWORD; - } - if (left == TEXT || right == TEXT) { - return TEXT; - } - return right; + // Both TEXT and SEMANTIC_TEXT are processed as KEYWORD + return KEYWORD; } if (left.isNumeric() && right.isNumeric()) { int lsize = left.estimatedSize().orElseThrow(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 4bf02d947c1e0..348ca4acd100e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -238,7 +238,7 @@ public final void test() throws Throwable { assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); assumeFalse( "can't use match in csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.capabilityName()) ); assumeFalse("can't load metrics in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METRICS_SYNTAX))); assumeFalse( @@ -496,7 +496,7 @@ private void assertWarnings(List warnings) { normalized.add(normW); } } - 
EsqlTestUtils.assertWarnings(normalized, testCase.expectedWarnings(), testCase.expectedWarningsRegex()); + testCase.assertWarnings(false).assertWarnings(normalized); } PlanRunner planRunner(BigArrays bigArrays, TestPhysicalOperationProviders physicalOperationProviders) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 8674fb5f6c7c9..c1b2adddfc838 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; @@ -1117,21 +1118,24 @@ public void testDateTruncOnInt() { verifyUnsupported(""" from test | eval date_trunc(1 month, int) - """, "second argument of [date_trunc(1 month, int)] must be [datetime], found value [int] type [integer]"); + """, "second argument of [date_trunc(1 month, int)] must be [date_nanos or datetime], found value [int] type [integer]"); } public void testDateTruncOnFloat() { verifyUnsupported(""" from test | eval date_trunc(1 month, float) - """, "second argument of [date_trunc(1 month, float)] must be [datetime], found value [float] type [double]"); + """, "second argument of [date_trunc(1 month, float)] must be [date_nanos or datetime], found value [float] type [double]"); } public void testDateTruncOnText() { - verifyUnsupported(""" - from test - | eval date_trunc(1 month, keyword) - """, 
"second argument of [date_trunc(1 month, keyword)] must be [datetime], found value [keyword] type [keyword]"); + verifyUnsupported( + """ + from test + | eval date_trunc(1 month, keyword) + """, + "second argument of [date_trunc(1 month, keyword)] must be [date_nanos or datetime], found value [keyword] type [keyword]" + ); } public void testDateTruncWithNumericInterval() { @@ -2316,6 +2320,27 @@ public void testInvalidNamedParamsForIdentifierPatterns() { ); } + public void testFromEnrichAndMatchColonUsage() { + assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + + LogicalPlan plan = analyze(""" + from *:test + | EVAL x = to_string(languages) + | ENRICH _any:languages ON x + | WHERE first_name: "Anna" + """, "mapping-default.json"); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var match = as(filter.condition(), Match.class); + var enrich = as(filter.child(), Enrich.class); + assertEquals(enrich.mode(), Enrich.Mode.ANY); + assertEquals(enrich.policy().getMatchField(), "language_code"); + var eval = as(enrich.child(), Eval.class); + var esRelation = as(eval.child(), EsRelation.class); + assertEquals(esRelation.index().name(), "test"); + + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 15aed0b45f02f..d6cda4a3a9ff7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -394,6 +394,19 @@ public void testAggFilterOnBucketingOrAggFunctions() { assertEquals("1:60: Unknown column [m]", error("from test | stats m = 
max(languages), min(languages) WHERE m + 2 > 1 by emp_no")); } + public void testAggWithNonBooleanFilter() { + for (String filter : List.of("\"true\"", "1", "1 + 0", "concat(\"a\", \"b\")")) { + String type = (filter.equals("1") || filter.equals("1 + 0")) ? "INTEGER" : "KEYWORD"; + assertEquals("1:19: Condition expression needs to be boolean, found [" + type + "]", error("from test | where " + filter)); + for (String by : List.of("", " by languages", " by bucket(salary, 10)")) { + assertEquals( + "1:34: Condition expression needs to be boolean, found [" + type + "]", + error("from test | stats count(*) where " + filter + by) + ); + } + } + } + public void testGroupingInsideAggsAsAgg() { assertEquals( "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", @@ -523,13 +536,12 @@ public void testGroupingAlias() throws Exception { public void testGroupingAliasDuplicate() throws Exception { assertEquals( - "1:22: column [languages] cannot be used as an aggregate " - + "once declared in the STATS BY grouping key [l = languages % 3, l = languages, l = languages % 2]", + "1:22: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", error("from test| stats l = languages + 3 by l = languages % 3, l = languages, l = languages % 2 | keep l") ); assertEquals( - "1:22: column [languages] cannot be used as an aggregate " + "once declared in the STATS BY grouping key [l = languages % 3]", + "1:22: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", error("from test| stats l = languages + 3, l = languages % 2 by l = languages % 3 | keep l") ); @@ -1112,43 +1124,51 @@ public void testWeightedAvg() { public void testMatchInsideEval() throws Exception { assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); - assertEquals("1:36: EVAL does not support MATCH expressions", error("row title = \"brown 
fox\" | eval x = title match \"fox\" ")); + assertEquals( + "1:36: [:] operator is only supported in WHERE commands", + error("row title = \"brown fox\" | eval x = title:\"fox\" ") + ); } public void testMatchFilter() throws Exception { - assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); - - assertEquals( - "1:63: MATCH requires a mapped index field, found [name]", - error("from test | eval name = concat(first_name, last_name) | where name match \"Anna\"") - ); + assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( - "1:19: MATCH requires a text or keyword field, but [salary] has type [integer]", - error("from test | where salary match \"100\"") + "1:19: first argument of [salary:\"100\"] must be [string], found value [salary] type [integer]", + error("from test | where salary:\"100\"") ); assertEquals( - "1:19: Invalid condition using MATCH", - error("from test | where first_name match \"Anna\" or starts_with(first_name, \"Anne\")") + "1:19: Invalid condition [first_name:\"Anna\" or starts_with(first_name, \"Anne\")]. " + + "[:] operator can't be used as part of an or condition", + error("from test | where first_name:\"Anna\" or starts_with(first_name, \"Anne\")") ); assertEquals( - "1:51: Invalid condition using MATCH", - error("from test | eval new_salary = salary + 10 | where first_name match \"Anna\" OR new_salary > 100") + "1:51: Invalid condition [first_name:\"Anna\" OR new_salary > 100]. 
" + "[:] operator can't be used as part of an or condition", + error("from test | eval new_salary = salary + 10 | where first_name:\"Anna\" OR new_salary > 100") ); + } + public void testMatchFunctionNotAllowedAfterCommands() throws Exception { assertEquals( - "1:45: MATCH requires a mapped index field, found [fn]", - error("from test | rename first_name as fn | where fn match \"Anna\"") + "1:24: [MATCH] function cannot be used after LIMIT", + error("from test | limit 10 | where match(first_name, \"Anna\")") ); } - public void testMatchFunctionNotAllowedAfterCommands() throws Exception { + public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") ); + assertEquals( + "1:24: [MATCH] function cannot be used after LIMIT", + error("from test | limit 10 | where match ( first_name, \"Anna\" ) ") + ); + assertEquals("1:24: [:] operator cannot be used after LIMIT", error("from test | limit 10 | where first_name:\"Anna\"")); + assertEquals("1:24: [:] operator cannot be used after LIMIT", error("from test | limit 10 | where first_name : \"Anna\"")); } public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { @@ -1211,26 +1231,40 @@ public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { public void testQueryStringFunctionOnlyAllowedInWhere() throws Exception { assertEquals("1:9: [QSTR] function is only supported in WHERE commands", error("row a = qstr(\"Anna\")")); - checkFullTextFunctionsOnlyAllowedInWhere("QSTR", "qstr(\"Anna\")"); + checkFullTextFunctionsOnlyAllowedInWhere("QSTR", "qstr(\"Anna\")", "function"); } public void testMatchFunctionOnlyAllowedInWhere() throws Exception { - checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, 
\"Anna\")"); + checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, \"Anna\")", "function"); } - private void checkFullTextFunctionsOnlyAllowedInWhere(String functionName, String functionInvocation) throws Exception { + public void testMatchOperatornOnlyAllowedInWhere() throws Exception { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator"); + } + + private void checkFullTextFunctionsOnlyAllowedInWhere(String functionName, String functionInvocation, String functionType) + throws Exception { assertEquals( - "1:22: [" + functionName + "] function is only supported in WHERE commands", + "1:22: [" + functionName + "] " + functionType + " is only supported in WHERE commands", error("from test | eval y = " + functionInvocation) ); assertEquals( - "1:18: [" + functionName + "] function is only supported in WHERE commands", + "1:18: [" + functionName + "] " + functionType + " is only supported in WHERE commands", error("from test | sort " + functionInvocation + " asc") ); assertEquals( - "1:23: [" + functionName + "] function is only supported in WHERE commands", + "1:23: [" + functionName + "] " + functionType + " is only supported in WHERE commands", error("from test | STATS c = " + functionInvocation + " BY first_name") ); + assertEquals( + "1:50: [" + functionName + "] " + functionType + " is only supported in WHERE commands", + error("from test | stats max_salary = max(salary) where " + functionInvocation) + ); + assertEquals( + "1:47: [" + functionName + "] " + functionType + " is only supported in WHERE commands", + error("from test | stats max_salary = max(salary) by " + functionInvocation) + ); } public void testQueryStringFunctionArgNotNullOrConstant() throws Exception { @@ -1243,18 +1277,27 @@ public void testQueryStringFunctionArgNotNullOrConstant() throws Exception { } public void 
testQueryStringWithDisjunctions() { - checkWithDisjunctions("QSTR", "qstr(\"first_name: Anna\")"); + checkWithDisjunctions("QSTR", "qstr(\"first_name: Anna\")", "function"); } - public void testMatchWithDisjunctions() { - checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")"); + public void testMatchFunctionWithDisjunctions() { + checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")", "function"); } - private void checkWithDisjunctions(String functionName, String functionInvocation) { + public void testMatchOperatorWithDisjunctions() { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + + checkWithDisjunctions(":", "first_name : \"Anna\"", "operator"); + } + + private void checkWithDisjunctions(String functionName, String functionInvocation, String functionType) { assertEquals( LoggerMessageFormat.format( null, - "1:19: Invalid condition [{} or length(first_name) > 12]. " + "Function {} can't be used as part of an or condition", + "1:19: Invalid condition [{} or length(first_name) > 12]. " + + "[{}] " + + functionType + + " can't be used as part of an or condition", functionInvocation, functionName ), @@ -1264,7 +1307,9 @@ private void checkWithDisjunctions(String functionName, String functionInvocatio LoggerMessageFormat.format( null, "1:19: Invalid condition [({} and first_name is not null) or (length(first_name) > 12 and first_name is null)]. " - + "Function {} can't be used as part of an or condition", + + "[{}] " + + functionType + + " can't be used as part of an or condition", functionInvocation, functionName ), @@ -1278,7 +1323,9 @@ private void checkWithDisjunctions(String functionName, String functionInvocatio LoggerMessageFormat.format( null, "1:19: Invalid condition [({} and first_name is not null) or first_name is null]. 
" - + "Function {} can't be used as part of an or condition", + + "[{}] " + + functionType + + " can't be used as part of an or condition", functionInvocation, functionName ), @@ -1287,29 +1334,71 @@ private void checkWithDisjunctions(String functionName, String functionInvocatio } public void testQueryStringFunctionWithNonBooleanFunctions() { - checkFullTextFunctionsWithNonBooleanFunctions("QSTR", "qstr(\"first_name: Anna\")"); + checkFullTextFunctionsWithNonBooleanFunctions("QSTR", "qstr(\"first_name: Anna\")", "function"); } public void testMatchFunctionWithNonBooleanFunctions() { - checkFullTextFunctionsWithNonBooleanFunctions("MATCH", "match(first_name, \"Anna\")"); + checkFullTextFunctionsWithNonBooleanFunctions("MATCH", "match(first_name, \"Anna\")", "function"); } - private void checkFullTextFunctionsWithNonBooleanFunctions(String functionName, String functionInvocation) { - assertEquals( - "1:19: Invalid condition [" + functionInvocation + " is not null]. Function " + functionName + " can't be used with ISNOTNULL", - error("from test | where " + functionInvocation + " is not null") - ); + public void testMatchOperatorWithNonBooleanFunctions() { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator"); + } + + private void checkFullTextFunctionsWithNonBooleanFunctions(String functionName, String functionInvocation, String functionType) { + if (functionType.equals("operator") == false) { + // The following tests are only possible for functions from a parsing perspective + assertEquals( + "1:19: Invalid condition [" + + functionInvocation + + " is not null]. [" + + functionName + + "] " + + functionType + + " can't be used with ISNOTNULL", + error("from test | where " + functionInvocation + " is not null") + ); + assertEquals( + "1:19: Invalid condition [" + + functionInvocation + + " is null]. 
[" + + functionName + + "] " + + functionType + + " can't be used with ISNULL", + error("from test | where " + functionInvocation + " is null") + ); + assertEquals( + "1:19: Invalid condition [" + + functionInvocation + + " in (\"hello\", \"world\")]. [" + + functionName + + "] " + + functionType + + " can't be used with IN", + error("from test | where " + functionInvocation + " in (\"hello\", \"world\")") + ); + } assertEquals( - "1:19: Invalid condition [" + functionInvocation + " is null]. Function " + functionName + " can't be used with ISNULL", - error("from test | where " + functionInvocation + " is null") + "1:19: Invalid condition [coalesce(" + + functionInvocation + + ", " + + functionInvocation + + ")]. [" + + functionName + + "] " + + functionType + + " can't be used with COALESCE", + error("from test | where coalesce(" + functionInvocation + ", " + functionInvocation + ")") ); assertEquals( - "1:19: Invalid condition [" + "1:19: argument of [concat(" + functionInvocation - + " in (\"hello\", \"world\")]. 
Function " - + functionName - + " can't be used with IN", - error("from test | where " + functionInvocation + " in (\"hello\", \"world\")") + + ", \"a\")] must be [string], found value [" + + functionInvocation + + "] type [boolean]", + error("from test | where concat(" + functionInvocation + ", \"a\")") ); } @@ -1331,6 +1420,12 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { "1:68: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") ); + + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + assertEquals( + "1:62: Unknown column [first_name]", + error("from test | stats max_salary = max(salary) by emp_no | where first_name : \"Anna\"") + ); } public void testMatchFunctionNullArgs() throws Exception { @@ -1344,8 +1439,11 @@ public void testMatchFunctionNullArgs() throws Exception { ); } - public void testMatchFunctionTargetsExistingField() throws Exception { + public void testMatchTargetsExistingField() throws Exception { assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")")); + + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + assertEquals("1:33: Unknown column [first_name]", error("from test | keep emp_no | where first_name : \"Anna\"")); } public void testCoalesceWithMixedNumericTypes() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index 967b4d854c325..6d0c45a972299 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -18,8 +18,8 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -36,58 +36,94 @@ public MatchTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - Set supported = Set.of(DataType.KEYWORD, DataType.TEXT); - List> supportedPerPosition = List.of(supported, supported); + Set supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); + Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); + Set supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); + List> supportedPerPosition = List.of( + supportedTextParams, + supportedTextParams, + supportedNumericParams, + supportedFuzzinessParams + ); List suppliers = new LinkedList<>(); for (DataType fieldType : DataType.stringTypes()) { for (DataType queryType : DataType.stringTypes()) { - suppliers.add( - new TestCaseSupplier( - "<" + fieldType + "-ES field, " + queryType + ">", - List.of(fieldType, queryType), - () -> testCase(fieldType, randomIdentifier(), queryType, randomAlphaOfLengthBetween(1, 10), equalTo(true)) - ) - ); - suppliers.add( - new TestCaseSupplier( - "<" + fieldType + "-non ES field, " + queryType + ">", - List.of(fieldType, queryType), - typeErrorSupplier(true, supportedPerPosition, List.of(fieldType, queryType), MatchTests::matchTypeErrorSupplier) - ) - ); + addPositiveTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); + addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); } } - List errorsSuppliers = 
errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); + + List suppliersWithErrors = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests - return parameterSuppliersFromTypedData(errorsSuppliers.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList()); + return parameterSuppliersFromTypedData( + suppliersWithErrors.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList() + ); } - private static String matchTypeErrorSupplier(boolean includeOrdinal, List> validPerPosition, List types) { - return "[] cannot operate on [" + types.getFirst().typeName() + "], which is not a field from an index mapping"; + private static void addPositiveTestCase( + List paramDataTypes, + List> supportedPerPosition, + List suppliers + ) { + + // Positive case - creates an ES field from the field parameter type + suppliers.add( + new TestCaseSupplier( + getTestCaseName(paramDataTypes, "-ES field"), + paramDataTypes, + () -> new TestCaseSupplier.TestCase( + getTestParams(paramDataTypes), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataType.BOOLEAN, + equalTo(true) + ) + ) + ); } - private static TestCaseSupplier.TestCase testCase( - DataType fieldType, - String field, - DataType queryType, - String query, - Matcher matcher + private static void addNonFieldTestCase( + List paramDataTypes, + List> supportedPerPosition, + List suppliers ) { - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData( - new FieldExpression(field, List.of(new FieldExpression.FieldValue(field))), - fieldType, - "field" - ), - new TestCaseSupplier.TypedData(new BytesRef(query), queryType, "query") - ), - "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataType.BOOLEAN, - matcher + // Negative case - use directly the field parameter type + 
suppliers.add( + new TestCaseSupplier( + getTestCaseName(paramDataTypes, "-non ES field"), + paramDataTypes, + typeErrorSupplier(true, supportedPerPosition, paramDataTypes, MatchTests::matchTypeErrorSupplier) + ) ); } + private static List getTestParams(List paramDataTypes) { + String fieldName = randomIdentifier(); + List params = new ArrayList<>(); + params.add( + new TestCaseSupplier.TypedData( + new FieldExpression(fieldName, List.of(new FieldExpression.FieldValue(fieldName))), + paramDataTypes.get(0), + "field" + ) + ); + params.add(new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), paramDataTypes.get(1), "query")); + return params; + } + + private static String getTestCaseName(List paramDataTypes, String fieldType) { + StringBuilder sb = new StringBuilder(); + sb.append("<"); + sb.append(paramDataTypes.get(0)).append(fieldType).append(", "); + sb.append(paramDataTypes.get(1)); + sb.append(">"); + return sb.toString(); + } + + private static String matchTypeErrorSupplier(boolean includeOrdinal, List> validPerPosition, List types) { + return "[] cannot operate on [" + types.getFirst().typeName() + "], which is not a field from an index mapping"; + } + @Override protected Expression build(Source source, List args) { return new Match(source, args.get(0), args.get(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java index a26504b8ced9a..7e7d91cdf76f4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java @@ -87,7 +87,8 @@ private static void dateCases(List suppliers, String name, Lon args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); return new TestCaseSupplier.TestCase( args, - 
"DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + "DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], " + + "rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", DataType.DATETIME, resultsMatcher(args) ); @@ -101,7 +102,7 @@ private static void dateCases(List suppliers, String name, Lon args.add(dateBound("to", toType, "2023-02-17T12:00:00Z")); return new TestCaseSupplier.TestCase( args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[3600000 in Z][fixed]]", + "DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[3600000 in Z][fixed]]", DataType.DATETIME, equalTo(Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).build().prepareForUnknown().round(date.getAsLong())) ); @@ -134,7 +135,7 @@ private static void dateCasesWithSpan( args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); return new TestCaseSupplier.TestCase( args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", + "DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", DataType.DATETIME, resultsMatcher(args) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncRoundingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncRoundingTests.java index 5af5c8e493177..b5e89fc41f368 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncRoundingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncRoundingTests.java @@ -15,7 +15,7 @@ import java.time.Period; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.createRounding; -import static 
org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.process; +import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.processDatetime; import static org.hamcrest.Matchers.containsString; /** @@ -97,10 +97,13 @@ public void testCreateRoundingNullInterval() { public void testDateTruncFunction() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Period.ofDays(-1)))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> processDatetime(ts, createRounding(Period.ofDays(-1))) + ); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); - e = expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Duration.ofHours(-1)))); + e = expectThrows(IllegalArgumentException.class, () -> processDatetime(ts, createRounding(Duration.ofHours(-1)))); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 0e4968cc2a504..2403900064645 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -20,6 
+21,7 @@ import java.time.Duration; import java.time.Instant; import java.time.Period; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -37,56 +39,84 @@ public DateTruncTests(@Name("TestCase") Supplier test @ParametersFactory public static Iterable parameters() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - List suppliers = List.of( - ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z"), - ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z"), - ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z"), - ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z"), - // 7 days period should return weekly rounding - ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z"), - // 3 months period should return quarterly - ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z"), - ofDuration(Duration.ofHours(1), ts, "2023-02-17T10:00:00.00Z"), - ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z"), - ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z"), - ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z"), - ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z"), - ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z"), - randomSecond() - ); + List suppliers = new ArrayList<>(); + suppliers.addAll(ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z")); + suppliers.addAll(ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z")); + suppliers.addAll(ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z")); + suppliers.addAll(ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z")); + // 7 days period should return weekly rounding + suppliers.addAll(ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z")); + // 3 months period should return quarterly + suppliers.addAll(ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z")); + suppliers.addAll(ofDuration(Duration.ofHours(1), ts, 
"2023-02-17T10:00:00.00Z")); + suppliers.addAll(ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z")); + suppliers.addAll(ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z")); + suppliers.addAll(ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z")); + suppliers.addAll(ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z")); + suppliers.addAll(ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z")); + suppliers.add(randomSecond()); + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> switch (p) { case 0 -> "dateperiod or timeduration"; - case 1 -> "datetime"; + case 1 -> "date_nanos or datetime"; default -> null; }); } - private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { - return new TestCaseSupplier( - List.of(DataType.DATE_PERIOD, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(period, DataType.DATE_PERIOD, "interval").forceLiteral(), - new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") - ), - Matchers.startsWith("DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), - DataType.DATETIME, - equalTo(toMillis(expectedDate)) + private static List ofDatePeriod(Period period, long value, String expectedDate) { + return List.of( + new TestCaseSupplier( + List.of(DataType.DATE_PERIOD, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, DataType.DATE_PERIOD, "interval").forceLiteral(), + new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") + ), + Matchers.startsWith("DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ), + new TestCaseSupplier( + List.of(DataType.DATE_PERIOD, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, 
DataType.DATE_PERIOD, "interval").forceLiteral(), + new TestCaseSupplier.TypedData(DateUtils.toNanoSeconds(value), DataType.DATE_NANOS, "date") + ), + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + equalTo(toNanos(expectedDate)) + ) ) ); } - private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { - return new TestCaseSupplier( - List.of(DataType.TIME_DURATION, DataType.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(duration, DataType.TIME_DURATION, "interval").forceLiteral(), - new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") - ), - Matchers.startsWith("DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), - DataType.DATETIME, - equalTo(toMillis(expectedDate)) + private static List ofDuration(Duration duration, long value, String expectedDate) { + return List.of( + new TestCaseSupplier( + List.of(DataType.TIME_DURATION, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(duration, DataType.TIME_DURATION, "interval").forceLiteral(), + new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") + ), + Matchers.startsWith("DateTruncDatetimeEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ), + new TestCaseSupplier( + List.of(DataType.TIME_DURATION, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(duration, DataType.TIME_DURATION, "interval").forceLiteral(), + new TestCaseSupplier.TypedData(DateUtils.toNanoSeconds(value), DataType.DATE_NANOS, "date") + ), + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + equalTo(toNanos(expectedDate)) + ) ) ); } @@ -109,7 +139,7 @@ private static TestCaseSupplier randomSecond() 
{ new TestCaseSupplier.TypedData(Duration.ofSeconds(1), DataType.TIME_DURATION, "interval"), new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataType.DATETIME, "date") ), - "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + "DateTruncDatetimeEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", DataType.DATETIME, equalTo(toMillis(dateFragment + ".00Z")) ); @@ -124,6 +154,10 @@ private static long toMillis(String timestamp) { return Instant.parse(timestamp).toEpochMilli(); } + private static long toNanos(String timestamp) { + return DateUtils.toLong(Instant.parse(timestamp)); + } + @Override protected Expression build(Source source, List args) { return new DateTrunc(source, args.get(0), args.get(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthSerializationTests.java new file mode 100644 index 0000000000000..2564ac0bdb1cf --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthSerializationTests.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests; + +public class BitLengthSerializationTests extends AbstractUnaryScalarSerializationTests { + @Override + protected BitLength create(Source source, Expression child) { + return new BitLength(source, child); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthTests.java new file mode 100644 index 0000000000000..bce4328a08abf --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/BitLengthTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class BitLengthTests extends AbstractScalarFunctionTestCase { + + public BitLengthTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + + for (DataType stringType : DataType.stringTypes()) { + for (var supplier : TestCaseSupplier.stringCases(stringType)) { + suppliers.add(makeSupplier(supplier)); + } + } + + return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers, (v, p) -> "string"); + } + + @Override + protected Expression build(Source source, List args) { + return new BitLength(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(fieldSupplier.name(), List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + String evaluatorToString = "BitLengthEvaluator[val=Attribute[channel=0]]"; + BytesRef value = BytesRefs.toBytesRef(fieldTypedData.data()); + var expectedValue = value.length * Byte.SIZE; + + return new TestCaseSupplier.TestCase(List.of(fieldTypedData), evaluatorToString, 
DataType.INTEGER, equalTo(expectedValue)); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 3436502610d62..905ca190ebe79 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.Build; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; @@ -26,6 +25,7 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -1091,11 +1091,11 @@ public void testMissingFieldsDoNotGetExtracted() { * estimatedRowSize[324] */ public void testSingleMatchFilterPushdown() { - assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); var plan = plannerOptimizer.plan(""" from test - | where first_name match "Anna" + | where first_name:"Anna" """); var limit = as(plan, LimitExec.class); @@ -1123,15 +1123,15 @@ public void testSingleMatchFilterPushdown() { * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336] 
*/ public void testMultipleMatchFilterPushdown() { - assumeTrue("Match operator is available just for snapshots", Build.current().isSnapshot()); + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); String query = """ from test - | where first_name match "Anna" OR first_name match "Anneke" + | where first_name:"Anna" and first_name:"Anneke" | sort emp_no | where emp_no > 10000 | eval description = concat("emp_no: ", to_str(emp_no), ", name: ", first_name, " ", last_name) - | where last_name match "Xinglin" + | where last_name:"Xinglin" """; var plan = plannerOptimizer.plan(query); @@ -1143,9 +1143,8 @@ public void testMultipleMatchFilterPushdown() { var actualLuceneQuery = as(fieldExtract.child(), EsQueryExec.class).query(); Source filterSource = new Source(4, 8, "emp_no > 10000"); - var expectedLuceneQuery = new BoolQueryBuilder().must( - new BoolQueryBuilder().should(new MatchQueryBuilder("first_name", "Anna")).should(new MatchQueryBuilder("first_name", "Anneke")) - ) + var expectedLuceneQuery = new BoolQueryBuilder().must(new MatchQueryBuilder("first_name", "Anna")) + .must(new MatchQueryBuilder("first_name", "Anneke")) .must(wrapWithSingleQuery(query, QueryBuilders.rangeQuery("emp_no").gt(10000), "emp_no", filterSource)) .must(new MatchQueryBuilder("last_name", "Xinglin")); assertThat(actualLuceneQuery.toString(), is(expectedLuceneQuery.toString())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index b022f955fd458..fdc4935d457e9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -38,7 +38,6 @@ import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -491,6 +490,11 @@ public void testRemoveOverridesInAggregate() throws Exception { var alias = as(aggregates.get(0), Alias.class); var max = as(alias.child(), Max.class); assertThat(Expressions.name(max.arguments().get(0)), equalTo("emp_no")); + assertWarnings( + "No limit defined, adding default limit of [1000]", + "Line 2:28: Field 'x' shadowed by field at line 2:45", + "Line 2:9: Field 'x' shadowed by field at line 2:45" + ); } // expected stats b by b (grouping overrides the rest of the aggs) @@ -513,6 +517,11 @@ public void testAggsWithOverridingInputAndGrouping() throws Exception { var aggregates = agg.aggregates(); assertThat(aggregates, hasSize(1)); assertThat(Expressions.names(aggregates), contains("b")); + assertWarnings( + "No limit defined, adding default limit of [1000]", + "Line 2:28: Field 'b' shadowed by field at line 2:47", + "Line 2:9: Field 'b' shadowed by field at line 2:47" + ); } /** @@ -5792,7 +5801,7 @@ public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { from test | eval initial = substring(first_name, 1) | where match(initial, "A")""")); assertTrue(e.getMessage().startsWith("Found ")); assertEquals( - "1:67: [MATCH] cannot operate on [initial], which is not a field from an index mapping", + "1:67: [MATCH] function cannot operate on [initial], which is not a field from an index mapping", e.getMessage().substring(header.length()) ); @@ -5800,7 +5809,7 @@ public void testMatchWithNonIndexedColumnCurrentlyUnsupported() { from test | eval text=concat(first_name, last_name) 
| where match(text, "cat")""")); assertTrue(e.getMessage().startsWith("Found ")); assertEquals( - "1:67: [MATCH] cannot operate on [text], which is not a field from an index mapping", + "1:67: [MATCH] function cannot operate on [text], which is not a field from an index mapping", e.getMessage().substring(header.length()) ); } @@ -5813,11 +5822,7 @@ public void testMatchFunctionIsNotNullable() { VerificationException ve = expectThrows(VerificationException.class, () -> plan(queryText)); assertThat( ve.getMessage(), - containsString("[MATCH] cannot operate on [text::keyword], which is not a field from an index mapping") + containsString("[MATCH] function cannot operate on [text::keyword], which is not a field from an index mapping") ); } - - private Literal nullOf(DataType dataType) { - return new Literal(Source.EMPTY, null, dataType); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 3b59a1d176a98..9f5d6440e4a06 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -4984,8 +4984,101 @@ public void testPushTopNDistanceWithCompoundFilterToSource() { } /** - * This test shows that with an additional EVAL used in the filter, we can no longer push down the SORT distance. - * TODO: This could be optimized in future work. Consider moving much of EnableSpatialDistancePushdown into logical planning. + * Tests that multiple sorts, including distance and a field, are pushed down to the source. 
+ * + * ProjectExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7]] + * \_TopNExec[[ + * Order[distance{r}#4,ASC,LAST], + * Order[scalerank{f}#27,ASC,LAST], + * Order[scale{r}#7,DESC,FIRST], + * Order[loc{r}#10,DESC,FIRST] + * ],5[INTEGER],0] + * \_ExchangeExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7, + * distance{r}#4, loc{r}#10],false] + * \_ProjectExec[[abbrev{f}#25, name{f}#26, location{f}#29, country{f}#30, city{f}#31, scalerank{f}#27, scale{r}#7, + * distance{r}#4, loc{r}#10]] + * \_FieldExtractExec[abbrev{f}#25, name{f}#26, country{f}#30, city{f}#31][] + * \_EvalExec[[ + * STDISTANCE(location{f}#29,[1 1 0 0 0 e1 7a 14 ae 47 21 29 40 a0 1a 2f dd 24 d6 4b 40][GEO_POINT]) AS distance, + * 10[INTEGER] - scalerank{f}#27 AS scale, TOSTRING(location{f}#29) AS loc + * ]] + * \_FieldExtractExec[location{f}#29, scalerank{f}#27][] + * \_EsQueryExec[airports], indexMode[standard], query[{ + * "bool":{ + * "filter":[ + * {"esql_single_value":{"field":"scalerank","next":{...},"source":"scalerank < 6@3:31"}}, + * {"bool":{ + * "must":[ + * {"geo_shape":{"location":{"relation":"INTERSECTS","shape":{...}}}}, + * {"geo_shape":{"location":{"relation":"DISJOINT","shape":{...}}}} + * ],"boost":1.0}}],"boost":1.0}}][_doc{f}#44], limit[5], sort[[ + * GeoDistanceSort[field=location{f}#29, direction=ASC, lat=55.673, lon=12.565], + * FieldSort[field=scalerank{f}#27, direction=ASC, nulls=LAST] + * ]] estimatedRowSize[303] + * + */ + public void testPushTopNDistanceAndPushableFieldWithCompoundFilterToSource() { + var optimized = optimizedPlan(physicalPlan(""" + FROM airports + | EVAL distance = ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")), scale = 10 - scalerank, loc = location::string + | WHERE distance < 500000 AND scalerank < 6 AND distance > 10000 + | SORT distance ASC, scalerank ASC, scale DESC, loc DESC + | LIMIT 5 + | KEEP abbrev, name, location, country, 
city, scalerank, scale + """, airports)); + + var project = as(optimized, ProjectExec.class); + var topN = as(project.child(), TopNExec.class); + assertThat(topN.order().size(), is(4)); + var exchange = asRemoteExchange(topN.child()); + + project = as(exchange.child(), ProjectExec.class); + assertThat( + names(project.projections()), + contains("abbrev", "name", "location", "country", "city", "scalerank", "scale", "distance", "loc") + ); + var extract = as(project.child(), FieldExtractExec.class); + assertThat(names(extract.attributesToExtract()), contains("abbrev", "name", "country", "city")); + var evalExec = as(extract.child(), EvalExec.class); + var alias = as(evalExec.fields().get(0), Alias.class); + assertThat(alias.name(), is("distance")); + var stDistance = as(alias.child(), StDistance.class); + assertThat(stDistance.left().toString(), startsWith("location")); + extract = as(evalExec.child(), FieldExtractExec.class); + assertThat(names(extract.attributesToExtract()), contains("location", "scalerank")); + var source = source(extract.child()); + + // Assert that the TopN(distance) is pushed down as geo-sort(location) + assertThat(source.limit(), is(topN.limit())); + Set orderSet = orderAsSet(topN.order().subList(0, 2)); + Set sortsSet = sortsAsSet(source.sorts(), Map.of("location", "distance")); + assertThat(orderSet, is(sortsSet)); + + // Fine-grained checks on the pushed down sort + assertThat(source.limit(), is(l(5))); + assertThat(source.sorts().size(), is(2)); + EsQueryExec.Sort sort = source.sorts().get(0); + assertThat(sort.direction(), is(Order.OrderDirection.ASC)); + assertThat(name(sort.field()), is("location")); + assertThat(sort.sortBuilder(), isA(GeoDistanceSortBuilder.class)); + sort = source.sorts().get(1); + assertThat(sort.direction(), is(Order.OrderDirection.ASC)); + assertThat(name(sort.field()), is("scalerank")); + assertThat(sort.sortBuilder(), isA(FieldSortBuilder.class)); + + // Fine-grained checks on the pushed down query + var bool = 
as(source.query(), BoolQueryBuilder.class); + var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); + assertThat("Expected one range query builder", rangeQueryBuilders.size(), equalTo(1)); + assertThat(((SingleValueQuery.Builder) rangeQueryBuilders.get(0)).field(), equalTo("scalerank")); + var filterBool = bool.filter().stream().filter(p -> p instanceof BoolQueryBuilder).toList(); + var fb = as(filterBool.get(0), BoolQueryBuilder.class); + var shapeQueryBuilders = fb.must().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList(); + assertShapeQueryRange(shapeQueryBuilders, 10000.0, 500000.0); + } + + /** + * This test shows that if the filter contains a predicate on the same field that is sorted, we cannot push down the sort. * * ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scalerank{f}#25 AS scale]] * \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scalerank{f}#25,ASC,LAST]],5[INTEGER],0] @@ -5021,6 +5114,7 @@ public void testPushTopNDistanceAndNonPushableEvalWithCompoundFilterToSource() { var project = as(optimized, ProjectExec.class); var topN = as(project.child(), TopNExec.class); + assertThat(topN.order().size(), is(2)); var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); @@ -5059,7 +5153,7 @@ public void testPushTopNDistanceAndNonPushableEvalWithCompoundFilterToSource() { } /** - * This test further shows that with a non-aliasing function, with the same name, less gets pushed down. + * This test shows that if the filter contains a predicate on the same field that is sorted, we cannot push down the sort. 
* * ProjectExec[[abbrev{f}#23, name{f}#24, location{f}#27, country{f}#28, city{f}#29, scale{r}#10]] * \_TopNExec[[Order[distance{r}#4,ASC,LAST], Order[scale{r}#10,ASC,LAST]],5[INTEGER],0] @@ -5096,6 +5190,7 @@ public void testPushTopNDistanceAndNonPushableEvalsWithCompoundFilterToSource() """, airports)); var project = as(optimized, ProjectExec.class); var topN = as(project.child(), TopNExec.class); + assertThat(topN.order().size(), is(2)); var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); @@ -5133,7 +5228,8 @@ public void testPushTopNDistanceAndNonPushableEvalsWithCompoundFilterToSource() } /** - * This test shows that with if the top level AND'd predicate contains a non-pushable component, we should not push anything. + * This test shows that with if the top level predicate contains a non-pushable component (eg. disjunction), + * we should not push down the filter. * * ProjectExec[[abbrev{f}#8612, name{f}#8613, location{f}#8616, country{f}#8617, city{f}#8618, scalerank{f}#8614 AS scale]] * \_TopNExec[[Order[distance{r}#8596,ASC,LAST], Order[scalerank{f}#8614,ASC,LAST]],5[INTEGER],0] @@ -5171,6 +5267,7 @@ public void testPushTopNDistanceWithCompoundFilterToSourceAndDisjunctiveNonPusha var project = as(optimized, ProjectExec.class); var topN = as(project.child(), TopNExec.class); + assertThat(topN.order().size(), is(2)); var exchange = asRemoteExchange(topN.child()); project = as(exchange.child(), ProjectExec.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 1e9fc5c281c45..0f46c1f44e8d3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; @@ -2297,4 +2298,41 @@ public void testMetricWithGroupKeyAsAgg() { expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); } } + + public void testMatchOperatorConstantQueryString() { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + var plan = statement("FROM test | WHERE field:\"value\""); + var filter = as(plan, Filter.class); + var match = (Match) filter.condition(); + var matchField = (UnresolvedAttribute) match.field(); + assertThat(matchField.name(), equalTo("field")); + assertThat(match.query().fold(), equalTo("value")); + } + + public void testInvalidMatchOperator() { + assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); + expectError("from test | WHERE field:", "line 1:25: mismatched input '' expecting {QUOTED_STRING, "); + expectError( + "from test | WHERE field:CONCAT(\"hello\", \"world\")", + "line 1:25: mismatched input 'CONCAT' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, " + ); + expectError("from test | WHERE field:123::STRING", "line 1:28: mismatched input '::' expecting {, '|', 'and', 'or'}"); + expectError( + "from test | WHERE field:(true OR false)", + "line 1:25: extraneous input '(' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, " + ); + expectError( + "from test | WHERE field:another_field_or_value", + "line 1:25: 
mismatched input 'another_field_or_value' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, " + ); + expectError("from test | WHERE field:2+3", "line 1:26: mismatched input '+' expecting {, '|', 'and', 'or'}"); + expectError( + "from test | WHERE \"field\":\"value\"", + "line 1:26: mismatched input ':' expecting {, '|', 'and', '::', 'or', '+', '-', '*', '/', '%'}" + ); + expectError( + "from test | WHERE CONCAT(\"field\", 1):\"value\"", + "line 1:37: mismatched input ':' expecting {, '|', 'and', '::', 'or', '+', '-', '*', '/', '%'}" + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java index b2228b5543ef2..b30f0870496e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java @@ -36,10 +36,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; import static org.elasticsearch.xpack.esql.core.type.DataType.PARTIAL_AGG; import static org.elasticsearch.xpack.esql.core.type.DataType.SCALED_FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; import static org.elasticsearch.xpack.esql.core.type.DataType.SOURCE; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.core.type.DataType.TSID_DATA_TYPE; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; @@ -71,10 +69,8 @@ public void testCommonTypeStrings() { } else if ((isString(dataType1) && isString(dataType2))) { if (dataType1 == dataType2) { assertEqualsCommonType(dataType1, dataType2, dataType1); - } else if 
(dataType1 == SEMANTIC_TEXT || dataType2 == SEMANTIC_TEXT) { - assertEqualsCommonType(dataType1, dataType2, KEYWORD); } else { - assertEqualsCommonType(dataType1, dataType2, TEXT); + assertEqualsCommonType(dataType1, dataType2, KEYWORD); } } else { assertNullCommonType(dataType1, dataType2); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 3a774a7a37d93..1fef26989d845 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference; +import org.elasticsearch.client.Request; import org.elasticsearch.inference.TaskType; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; @@ -27,8 +28,15 @@ public class DefaultEndPointsIT extends InferenceBaseRestTest { private TestThreadPool threadPool; @Before - public void createThreadPool() { + public void setupTest() throws IOException { threadPool = new TestThreadPool(DefaultEndPointsIT.class.getSimpleName()); + + Request loggingSettings = new Request("PUT", "_cluster/settings"); + loggingSettings.setJsonEntity(""" + {"persistent" : { + "logger.org.elasticsearch.xpack.ml.packageloader" : "DEBUG" + }}"""); + client().performRequest(loggingSettings); } @After @@ -64,7 +72,7 @@ private static void assertDefaultElserConfig(Map modelConfig) { assertThat( modelConfig.toString(), adaptiveAllocations, - Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 8)) + Matchers.is(Map.of("enabled", true, 
"min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } @@ -99,7 +107,7 @@ private static void assertDefaultE5Config(Map modelConfig) { assertThat( modelConfig.toString(), adaptiveAllocations, - Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 8)) + Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 8713511c2f5f2..be6b3725b0f35 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; @@ -592,7 +591,7 @@ private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) ElasticsearchInternalService.NAME, ElserInternalServiceSettingsTests.createRandom(), ElserMlNodeTaskSettingsTests.createRandom(), - ChunkingSettingsFeatureFlag.isEnabled() && randomBoolean() ? ChunkingSettingsTests.createRandomChunkingSettings() : null + randomBoolean() ? 
ChunkingSettingsTests.createRandomChunkingSettings() : null ); default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); }; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 3ad1e626481bc..f068caff805af 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -101,7 +101,6 @@ import org.elasticsearch.xpack.inference.services.ibmwatsonx.IbmWatsonxService; import org.elasticsearch.xpack.inference.services.mistral.MistralService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; -import org.elasticsearch.xpack.inference.telemetry.ApmInferenceStats; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; import java.util.ArrayList; @@ -239,7 +238,7 @@ public Collection createComponents(PluginServices services) { shardBulkInferenceActionFilter.set(actionFilter); var meterRegistry = services.telemetryProvider().getMeterRegistry(); - var stats = new PluginComponentBinding<>(InferenceStats.class, ApmInferenceStats.create(meterRegistry)); + var stats = new PluginComponentBinding<>(InferenceStats.class, InferenceStats.create(meterRegistry)); return List.of(modelRegistry, registry, httpClientManager, stats); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index e046e2aad463b..ba9ab3c133731 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java 
@@ -7,12 +7,15 @@ package org.elasticsearch.xpack.inference.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InferenceServiceResults; @@ -25,20 +28,22 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.common.DelegatingProcessor; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import org.elasticsearch.xpack.inference.telemetry.InferenceTimer; -import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; +import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; public class TransportInferenceAction extends HandledTransportAction { + private static final Logger log = LogManager.getLogger(TransportInferenceAction.class); private static final String STREAMING_INFERENCE_TASK_TYPE = "streaming_inference"; private static final String STREAMING_TASK_ACTION = "xpack/inference/streaming_inference[n]"; - private static final Set> supportsStreaming = Set.of(); - private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; 
private final InferenceStats inferenceStats; @@ -62,17 +67,22 @@ public TransportInferenceAction( @Override protected void doExecute(Task task, InferenceAction.Request request, ActionListener listener) { + var timer = InferenceTimer.start(); - ActionListener getModelListener = listener.delegateFailureAndWrap((delegate, unparsedModel) -> { + var getModelListener = ActionListener.wrap((UnparsedModel unparsedModel) -> { var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { - listener.onFailure(unknownServiceException(unparsedModel.service(), request.getInferenceEntityId())); + var e = unknownServiceException(unparsedModel.service(), request.getInferenceEntityId()); + recordMetrics(unparsedModel, timer, e); + listener.onFailure(e); return; } if (request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false) { // not the wildcard task type and not the model task type - listener.onFailure(incompatibleTaskTypeException(request.getTaskType(), unparsedModel.taskType())); + var e = incompatibleTaskTypeException(request.getTaskType(), unparsedModel.taskType()); + recordMetrics(unparsedModel, timer, e); + listener.onFailure(e); return; } @@ -83,20 +93,69 @@ protected void doExecute(Task task, InferenceAction.Request request, ActionListe unparsedModel.settings(), unparsedModel.secrets() ); - inferOnService(model, request, service.get(), delegate); + inferOnServiceWithMetrics(model, request, service.get(), timer, listener); + }, e -> { + try { + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(e)); + } catch (Exception metricsException) { + log.atDebug().withThrowable(metricsException).log("Failed to record metrics when the model is missing, dropping metrics"); + } + listener.onFailure(e); }); modelRegistry.getModelWithSecrets(request.getInferenceEntityId(), getModelListener); } - private void inferOnService( + private void recordMetrics(UnparsedModel model, InferenceTimer timer, @Nullable Throwable 
t) { + try { + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, t)); + } catch (Exception e) { + log.atDebug().withThrowable(e).log("Failed to record metrics with an unparsed model, dropping metrics"); + } + } + + private void inferOnServiceWithMetrics( Model model, InferenceAction.Request request, InferenceService service, + InferenceTimer timer, ActionListener listener + ) { + inferenceStats.requestCount().incrementBy(1, modelAttributes(model)); + inferOnService(model, request, service, ActionListener.wrap(inferenceResults -> { + if (request.isStreaming()) { + var taskProcessor = streamingTaskManager.create(STREAMING_INFERENCE_TASK_TYPE, STREAMING_TASK_ACTION); + inferenceResults.publisher().subscribe(taskProcessor); + + var instrumentedStream = new PublisherWithMetrics(timer, model); + taskProcessor.subscribe(instrumentedStream); + + listener.onResponse(new InferenceAction.Response(inferenceResults, instrumentedStream)); + } else { + recordMetrics(model, timer, null); + listener.onResponse(new InferenceAction.Response(inferenceResults)); + } + }, e -> { + recordMetrics(model, timer, e); + listener.onFailure(e); + })); + } + + private void recordMetrics(Model model, InferenceTimer timer, @Nullable Throwable t) { + try { + inferenceStats.inferenceDuration().record(timer.elapsedMillis(), responseAttributes(model, t)); + } catch (Exception e) { + log.atDebug().withThrowable(e).log("Failed to record metrics with a parsed model, dropping metrics"); + } + } + + private void inferOnService( + Model model, + InferenceAction.Request request, + InferenceService service, + ActionListener listener ) { if (request.isStreaming() == false || service.canStream(request.getTaskType())) { - inferenceStats.incrementRequestCount(model); service.infer( model, request.getQuery(), @@ -105,7 +164,7 @@ private void inferOnService( request.getTaskSettings(), request.getInputType(), request.getInferenceTimeout(), - createListener(request, listener) 
+ listener ); } else { listener.onFailure(unsupportedStreamingTaskException(request, service)); @@ -133,20 +192,6 @@ private ElasticsearchStatusException unsupportedStreamingTaskException(Inference } } - private ActionListener createListener( - InferenceAction.Request request, - ActionListener listener - ) { - if (request.isStreaming()) { - return listener.delegateFailureAndWrap((l, inferenceResults) -> { - var taskProcessor = streamingTaskManager.create(STREAMING_INFERENCE_TASK_TYPE, STREAMING_TASK_ACTION); - inferenceResults.publisher().subscribe(taskProcessor); - l.onResponse(new InferenceAction.Response(inferenceResults, taskProcessor)); - }); - } - return listener.delegateFailureAndWrap((l, inferenceResults) -> l.onResponse(new InferenceAction.Response(inferenceResults))); - } - private static ElasticsearchStatusException unknownServiceException(String service, String inferenceId) { return new ElasticsearchStatusException("Unknown service [{}] for model [{}]. ", RestStatus.BAD_REQUEST, service, inferenceId); } @@ -160,4 +205,37 @@ private static ElasticsearchStatusException incompatibleTaskTypeException(TaskTy ); } + private class PublisherWithMetrics extends DelegatingProcessor { + private final InferenceTimer timer; + private final Model model; + + private PublisherWithMetrics(InferenceTimer timer, Model model) { + this.timer = timer; + this.model = model; + } + + @Override + protected void next(ChunkedToXContent item) { + downstream().onNext(item); + } + + @Override + public void onError(Throwable throwable) { + recordMetrics(model, timer, throwable); + super.onError(throwable); + } + + @Override + protected void onCancel() { + recordMetrics(model, timer, null); + super.onCancel(); + } + + @Override + public void onComplete() { + recordMetrics(model, timer, null); + super.onComplete(); + } + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java index 20520ca829297..2ede1684e315b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java @@ -14,8 +14,14 @@ public class ChunkingSettingsBuilder { public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); + // Old settings used for backward compatibility for endpoints created before 8.16 when default was changed + public static final WordBoundaryChunkingSettings OLD_DEFAULT_SETTINGS = new WordBoundaryChunkingSettings(250, 100); public static ChunkingSettings fromMap(Map settings) { + if (settings == null) { + return OLD_DEFAULT_SETTINGS; + } + if (settings.isEmpty()) { return DEFAULT_SETTINGS; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java index fc2d890dd89e6..03e794e42c3a2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java @@ -61,11 +61,14 @@ public void request(long n) { public void cancel() { if (isClosed.compareAndSet(false, true) && upstream != null) { upstream.cancel(); + onCancel(); } } }; } + protected void onCancel() {} + @Override public void onSubscribe(Flow.Subscription subscription) { if (upstream != null) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 9e7f8712b4087..ec4b8d9bb4d3d 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -209,6 +209,15 @@ public static ElasticsearchStatusException invalidModelTypeForUpdateModelWithEmb ); } + public static ElasticsearchStatusException invalidModelTypeForUpdateModelWithChatCompletionDetails( + Class invalidModelType + ) { + throw new ElasticsearchStatusException( + Strings.format("Can't update chat completion details for model with unexpected type %s", invalidModelType), + RestStatus.BAD_REQUEST + ); + } + public static String missingSettingErrorMsg(String settingName, String scope) { return Strings.format("[%s] does not contain the required setting [%s]", scope, settingName); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index f1472dda4f86f..c84b4314b9d1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -33,7 +33,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.inference.configuration.SettingsConfigurationSelectOption; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.alibabacloudsearch.AlibabaCloudSearchActionCreator; @@ -64,6 +63,7 @@ import 
static org.elasticsearch.xpack.core.inference.action.InferenceAction.Request.DEFAULT_TIMEOUT; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -104,7 +104,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && List.of(TEXT_EMBEDDING, SPARSE_EMBEDDING).contains(taskType)) { + if (List.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING).contains(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -227,8 +227,8 @@ public AlibabaCloudSearchModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && List.of(TEXT_EMBEDDING, SPARSE_EMBEDDING).contains(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (List.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING).contains(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelWithoutLoggingDeprecations( @@ -248,8 +248,8 @@ public AlibabaCloudSearchModel parsePersistedConfig(String inferenceEntityId, Ta 
Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && List.of(TEXT_EMBEDDING, SPARSE_EMBEDDING).contains(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (List.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING).contains(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelWithoutLoggingDeprecations( @@ -302,21 +302,12 @@ protected void doChunkedInfer( AlibabaCloudSearchModel alibabaCloudSearchModel = (AlibabaCloudSearchModel) model; var actionCreator = new AlibabaCloudSearchActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - getEmbeddingTypeFromTaskType(alibabaCloudSearchModel.getTaskType()), - alibabaCloudSearchModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - getEmbeddingTypeFromTaskType(alibabaCloudSearchModel.getTaskType()) - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + getEmbeddingTypeFromTaskType(alibabaCloudSearchModel.getTaskType()), + alibabaCloudSearchModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = alibabaCloudSearchModel.accept(actionCreator, taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index f42b48ce59a89..f9822c7ab4af9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -33,7 +33,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.inference.configuration.SettingsConfigurationSelectOption; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionCreator; @@ -61,6 +60,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -121,18 +121,12 @@ protected void doChunkedInfer( if (model instanceof AmazonBedrockModel baseAmazonBedrockModel) { var maxBatchSize = getEmbeddingsMaxBatchSize(baseAmazonBedrockModel.provider()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - maxBatchSize, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - 
baseAmazonBedrockModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker(inputs.getInputs(), maxBatchSize, EmbeddingRequestChunker.EmbeddingType.FLOAT) - .batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + maxBatchSize, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + baseAmazonBedrockModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = baseAmazonBedrockModel.accept(actionCreator, taskSettings); @@ -160,7 +154,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -199,8 +193,8 @@ public Model parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModel( @@ -221,8 +215,8 @@ public Model parsePersistedConfig(String modelId, TaskType taskType, Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && 
TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModel( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 89efb1c95a12a..a2f8dc409585e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -33,7 +33,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.inference.configuration.SettingsConfigurationSelectOption; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.azureaistudio.AzureAiStudioActionCreator; @@ -50,6 +49,7 @@ import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import java.util.EnumSet; import java.util.HashMap; @@ -60,6 +60,7 @@ import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -113,21 +114,12 @@ protected void doChunkedInfer( if (model instanceof AzureAiStudioModel baseAzureAiStudioModel) { var actionCreator = new AzureAiStudioActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - baseAzureAiStudioModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + baseAzureAiStudioModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = baseAzureAiStudioModel.accept(actionCreator, taskSettings); @@ -150,7 +142,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { 
chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -189,8 +181,8 @@ public AzureAiStudioModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -210,8 +202,8 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -324,62 +316,52 @@ private AzureAiStudioModel createModelFromPersistent( @Override public void checkModelConfig(Model model, ActionListener listener) { + // TODO: Remove this function once all services have been updated to use the new model validators + ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener); + } + + @Override + public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { if (model instanceof AzureAiStudioEmbeddingsModel embeddingsModel) { - ServiceUtils.getEmbeddingSize( - model, - 
this, - listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateEmbeddingModelConfig(embeddingsModel, size))) + var serviceSettings = embeddingsModel.getServiceSettings(); + var similarityFromModel = serviceSettings.similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + + var updatedServiceSettings = new AzureAiStudioEmbeddingsServiceSettings( + serviceSettings.target(), + serviceSettings.provider(), + serviceSettings.endpointType(), + embeddingSize, + serviceSettings.dimensionsSetByUser(), + serviceSettings.maxInputTokens(), + similarityToUse, + serviceSettings.rateLimitSettings() ); - } else if (model instanceof AzureAiStudioChatCompletionModel chatCompletionModel) { - listener.onResponse(updateChatCompletionModelConfig(chatCompletionModel)); + + return new AzureAiStudioEmbeddingsModel(embeddingsModel, updatedServiceSettings); } else { - listener.onResponse(model); + throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass()); } } - private AzureAiStudioEmbeddingsModel updateEmbeddingModelConfig(AzureAiStudioEmbeddingsModel embeddingsModel, int embeddingsSize) { - if (embeddingsModel.getServiceSettings().dimensionsSetByUser() - && embeddingsModel.getServiceSettings().dimensions() != null - && embeddingsModel.getServiceSettings().dimensions() != embeddingsSize) { - throw new ElasticsearchStatusException( - Strings.format( - "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. 
" - + "Please recreate the [%s] configuration with the correct dimensions", - embeddingsSize, - embeddingsModel.getServiceSettings().dimensions(), - embeddingsModel.getConfigurations().getInferenceEntityId() - ), - RestStatus.BAD_REQUEST + @Override + public Model updateModelWithChatCompletionDetails(Model model) { + if (model instanceof AzureAiStudioChatCompletionModel chatCompletionModel) { + var taskSettings = chatCompletionModel.getTaskSettings(); + var modelMaxNewTokens = taskSettings.maxNewTokens(); + var maxNewTokensToUse = modelMaxNewTokens == null ? DEFAULT_MAX_NEW_TOKENS : modelMaxNewTokens; + + var updatedTaskSettings = new AzureAiStudioChatCompletionTaskSettings( + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.doSample(), + maxNewTokensToUse ); - } - var similarityFromModel = embeddingsModel.getServiceSettings().similarity(); - var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; - - AzureAiStudioEmbeddingsServiceSettings serviceSettings = new AzureAiStudioEmbeddingsServiceSettings( - embeddingsModel.getServiceSettings().target(), - embeddingsModel.getServiceSettings().provider(), - embeddingsModel.getServiceSettings().endpointType(), - embeddingsSize, - embeddingsModel.getServiceSettings().dimensionsSetByUser(), - embeddingsModel.getServiceSettings().maxInputTokens(), - similarityToUse, - embeddingsModel.getServiceSettings().rateLimitSettings() - ); - - return new AzureAiStudioEmbeddingsModel(embeddingsModel, serviceSettings); - } - - private AzureAiStudioChatCompletionModel updateChatCompletionModelConfig(AzureAiStudioChatCompletionModel chatCompletionModel) { - var modelMaxNewTokens = chatCompletionModel.getTaskSettings().maxNewTokens(); - var maxNewTokensToUse = modelMaxNewTokens == null ? 
DEFAULT_MAX_NEW_TOKENS : modelMaxNewTokens; - var updatedTaskSettings = new AzureAiStudioChatCompletionTaskSettings( - chatCompletionModel.getTaskSettings().temperature(), - chatCompletionModel.getTaskSettings().topP(), - chatCompletionModel.getTaskSettings().doSample(), - maxNewTokensToUse - ); - return new AzureAiStudioChatCompletionModel(chatCompletionModel, updatedTaskSettings); + return new AzureAiStudioChatCompletionModel(chatCompletionModel, updatedTaskSettings); + } else { + throw ServiceUtils.invalidModelTypeForUpdateModelWithChatCompletionDetails(model.getClass()); + } } private static void checkProviderAndEndpointTypeForTask( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 6e825355ee74f..6d36e5f6c8fe7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -32,7 +32,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreator; @@ -56,6 +55,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -90,7 +90,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -188,8 +188,8 @@ public AzureOpenAiModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -209,8 +209,8 @@ public AzureOpenAiModel parsePersistedConfig(String inferenceEntityId, TaskType Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = 
ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -272,21 +272,12 @@ protected void doChunkedInfer( AzureOpenAiModel azureOpenAiModel = (AzureOpenAiModel) model; var actionCreator = new AzureOpenAiActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - azureOpenAiModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + azureOpenAiModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = azureOpenAiModel.accept(actionCreator, taskSettings); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 1685683173a11..de1d055e160da 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -29,7 +29,6 @@ import org.elasticsearch.inference.TaskSettingsConfiguration; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import 
org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; @@ -55,6 +54,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -91,7 +91,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -186,8 +186,8 @@ public CohereModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelWithoutLoggingDeprecations( @@ -207,8 +207,8 @@ public CohereModel parsePersistedConfig(String 
inferenceEntityId, TaskType taskT Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelWithoutLoggingDeprecations( @@ -271,21 +271,12 @@ protected void doChunkedInfer( CohereModel cohereModel = (CohereModel) model; var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()), - cohereModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()) - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()), + cohereModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = cohereModel.accept(actionCreator, taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 2e69a88731fd3..83249266c79ab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -36,7 +36,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.inference.configuration.SettingsConfigurationSelectOption; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; @@ -74,6 +73,7 @@ import java.util.stream.Stream; import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -150,8 +150,7 @@ public void parseRequestConfig( String serviceName = (String) config.remove(ModelConfigurations.SERVICE); // required for elser service in elasticsearch service ChunkingSettings chunkingSettings; - if (ChunkingSettingsFeatureFlag.isEnabled() - && (TaskType.TEXT_EMBEDDING.equals(taskType) || TaskType.SPARSE_EMBEDDING.equals(taskType))) { + if (TaskType.TEXT_EMBEDDING.equals(taskType) || TaskType.SPARSE_EMBEDDING.equals(taskType)) { chunkingSettings = 
ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -458,9 +457,8 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() - && (TaskType.TEXT_EMBEDDING.equals(taskType) || TaskType.SPARSE_EMBEDDING.equals(taskType))) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType) || TaskType.SPARSE_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } String modelId = (String) serviceSettingsMap.get(MODEL_ID); @@ -675,21 +673,12 @@ public void chunkedInfer( if (model instanceof ElasticsearchInternalModel esModel) { - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - input, - EMBEDDING_MAX_BATCH_SIZE, - embeddingTypeFromTaskTypeAndSettings(model.getTaskType(), esModel.internalServiceSettings), - esModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - input, - EMBEDDING_MAX_BATCH_SIZE, - embeddingTypeFromTaskTypeAndSettings(model.getTaskType(), esModel.internalServiceSettings) - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + input, + EMBEDDING_MAX_BATCH_SIZE, + embeddingTypeFromTaskTypeAndSettings(model.getTaskType(), esModel.internalServiceSettings), + esModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var batch : batchedRequests) { var inferenceRequest = buildInferenceRequest( diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index d5f021b77e7c4..1c01ebbe2c0e4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -31,7 +31,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.SenderExecutableAction; @@ -62,6 +61,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -94,7 +94,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && 
TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -168,8 +168,8 @@ public GoogleAiStudioModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -210,8 +210,8 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -321,21 +321,13 @@ protected void doChunkedInfer( ) { GoogleAiStudioModel googleAiStudioModel = (GoogleAiStudioModel) model; - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - googleAiStudioModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - 
batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + googleAiStudioModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); + for (var request : batchedRequests) { doInfer(model, new DocumentsOnlyInput(request.batch().inputs()), taskSettings, inputType, timeout, request.listener()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index a38691c4de750..0b4da10e7130f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -31,7 +31,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.googlevertexai.GoogleVertexAiActionCreator; @@ -55,6 +54,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -89,7 +89,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -128,8 +128,8 @@ public Model parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -149,8 +149,8 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, 
ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -227,21 +227,12 @@ protected void doChunkedInfer( GoogleVertexAiModel googleVertexAiModel = (GoogleVertexAiModel) model; var actionCreator = new GoogleVertexAiActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - googleVertexAiModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + googleVertexAiModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = googleVertexAiModel.accept(actionCreator, taskSettings); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index 9f2615ac5c515..1fa45ab1d0f75 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -16,7 +16,6 @@ import org.elasticsearch.inference.ModelConfigurations; import 
org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -29,6 +28,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -57,7 +57,7 @@ public void parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -93,8 +93,8 @@ public HuggingFaceModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = 
ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModel( @@ -113,8 +113,8 @@ public HuggingFaceModel parsePersistedConfig(String inferenceEntityId, TaskType Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModel( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index b1c478d229c73..eede14a975234 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -28,7 +28,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -129,21 +128,12 @@ protected void doChunkedInfer( var huggingFaceModel = (HuggingFaceModel) model; var actionCreator = new 
HuggingFaceActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - huggingFaceModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + huggingFaceModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = huggingFaceModel.accept(actionCreator); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index acc20fa35fd47..2e810c357f8bd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -31,7 +31,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.mistral.MistralActionCreator; @@ -56,6 +55,7 @@ import 
static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -102,21 +102,12 @@ protected void doChunkedInfer( var actionCreator = new MistralActionCreator(getSender(), getServiceComponents()); if (model instanceof MistralEmbeddingsModel mistralEmbeddingsModel) { - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - MistralConstants.MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - mistralEmbeddingsModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - MistralConstants.MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + MistralConstants.MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + mistralEmbeddingsModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = mistralEmbeddingsModel.accept(actionCreator, taskSettings); @@ -154,7 +145,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if 
(ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -193,8 +184,8 @@ public Model parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( @@ -214,8 +205,8 @@ public Model parsePersistedConfig(String modelId, TaskType taskType, Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } return createModelFromPersistent( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 7b65f97a3074c..81ab87a461696 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -31,7 +31,6 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; @@ -59,6 +58,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; @@ -91,7 +91,7 @@ public void parseRequestConfig( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { chunkingSettings = ChunkingSettingsBuilder.fromMap( removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) ); @@ -187,8 +187,8 @@ public OpenAiModel parsePersistedConfigWithSecrets( Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); ChunkingSettings chunkingSettings = null; - if 
(ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } moveModelFromTaskToServiceSettings(taskSettingsMap, serviceSettingsMap); @@ -210,8 +210,8 @@ public OpenAiModel parsePersistedConfig(String inferenceEntityId, TaskType taskT Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); ChunkingSettings chunkingSettings = null; - if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { - chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS)); } moveModelFromTaskToServiceSettings(taskSettingsMap, serviceSettingsMap); @@ -276,21 +276,12 @@ protected void doChunkedInfer( OpenAiModel openAiModel = (OpenAiModel) model; var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); - List batchedRequests; - if (ChunkingSettingsFeatureFlag.isEnabled()) { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT, - openAiModel.getConfigurations().getChunkingSettings() - ).batchRequestsWithListeners(listener); - } else { - batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); - } + List batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + 
EmbeddingRequestChunker.EmbeddingType.FLOAT, + openAiModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = openAiModel.accept(actionCreator, taskSettings); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidator.java new file mode 100644 index 0000000000000..b7a9fa7e6f3ab --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidator.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.validation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.Model; + +public class ChatCompletionModelValidator implements ModelValidator { + + private final ServiceIntegrationValidator serviceIntegrationValidator; + + public ChatCompletionModelValidator(ServiceIntegrationValidator serviceIntegrationValidator) { + this.serviceIntegrationValidator = serviceIntegrationValidator; + } + + @Override + public void validate(InferenceService service, Model model, ActionListener listener) { + serviceIntegrationValidator.validate(service, model, listener.delegateFailureAndWrap((delegate, r) -> { + delegate.onResponse(postValidate(service, model)); + })); + } + + private Model postValidate(InferenceService service, Model model) { + return service.updateModelWithChatCompletionDetails(model); + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java index 0464e790ba79a..b5bf77cbb3c7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java @@ -20,7 +20,10 @@ public static ModelValidator buildModelValidator(TaskType taskType) { case TEXT_EMBEDDING -> { return new TextEmbeddingModelValidator(new SimpleServiceIntegrationValidator()); } - case SPARSE_EMBEDDING, RERANK, COMPLETION, ANY -> { + case COMPLETION -> { + return new ChatCompletionModelValidator(new SimpleServiceIntegrationValidator()); + } + case SPARSE_EMBEDDING, RERANK, ANY -> { return new SimpleModelValidator(new SimpleServiceIntegrationValidator()); } default -> throw new IllegalArgumentException(Strings.format("Can't validate inference model of for task type %s ", taskType)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java deleted file mode 100644 index ae14a0792dead..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStats.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.inference.Model; -import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.MeterRegistry; - -import java.util.HashMap; -import java.util.Objects; - -public class ApmInferenceStats implements InferenceStats { - private final LongCounter inferenceAPMRequestCounter; - - public ApmInferenceStats(LongCounter inferenceAPMRequestCounter) { - this.inferenceAPMRequestCounter = Objects.requireNonNull(inferenceAPMRequestCounter); - } - - @Override - public void incrementRequestCount(Model model) { - var service = model.getConfigurations().getService(); - var taskType = model.getTaskType(); - var modelId = model.getServiceSettings().modelId(); - - var attributes = new HashMap(5); - attributes.put("service", service); - attributes.put("task_type", taskType.toString()); - if (modelId != null) { - attributes.put("model_id", modelId); - } - - inferenceAPMRequestCounter.incrementBy(1, attributes); - } - - public static ApmInferenceStats create(MeterRegistry meterRegistry) { - return new ApmInferenceStats( - meterRegistry.registerLongCounter( - "es.inference.requests.count.total", - "Inference API request counts for a particular service, task type, model ID", - "operations" - ) - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java index d080e818e45fc..afdbc21bae319 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceStats.java @@ -7,15 +7,87 @@ package org.elasticsearch.xpack.inference.telemetry; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.Nullable; import 
org.elasticsearch.inference.Model; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; -public interface InferenceStats { +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; - /** - * Increment the counter for a particular value in a thread safe manner. - * @param model the model to increment request count for - */ - void incrementRequestCount(Model model); +import static java.util.Map.entry; +import static java.util.stream.Stream.concat; - InferenceStats NOOP = model -> {}; +public record InferenceStats(LongCounter requestCount, LongHistogram inferenceDuration) { + + public InferenceStats { + Objects.requireNonNull(requestCount); + Objects.requireNonNull(inferenceDuration); + } + + public static InferenceStats create(MeterRegistry meterRegistry) { + return new InferenceStats( + meterRegistry.registerLongCounter( + "es.inference.requests.count.total", + "Inference API request counts for a particular service, task type, model ID", + "operations" + ), + meterRegistry.registerLongHistogram( + "es.inference.requests.time", + "Inference API request counts for a particular service, task type, model ID", + "ms" + ) + ); + } + + public static Map modelAttributes(Model model) { + return toMap(modelAttributeEntries(model)); + } + + private static Stream> modelAttributeEntries(Model model) { + var stream = Stream.>builder() + .add(entry("service", model.getConfigurations().getService())) + .add(entry("task_type", model.getTaskType().toString())); + if (model.getServiceSettings().modelId() != null) { + stream.add(entry("model_id", model.getServiceSettings().modelId())); + } + return stream.build(); + } + + private static Map toMap(Stream> stream) { + return stream.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + 
public static Map responseAttributes(Model model, @Nullable Throwable t) { + return toMap(concat(modelAttributeEntries(model), errorAttributes(t))); + } + + public static Map responseAttributes(UnparsedModel model, @Nullable Throwable t) { + var unknownModelAttributes = Stream.>builder() + .add(entry("service", model.service())) + .add(entry("task_type", model.taskType().toString())) + .build(); + + return toMap(concat(unknownModelAttributes, errorAttributes(t))); + } + + public static Map responseAttributes(@Nullable Throwable t) { + return toMap(errorAttributes(t)); + } + + private static Stream> errorAttributes(@Nullable Throwable t) { + return switch (t) { + case null -> Stream.of(entry("status_code", 200)); + case ElasticsearchStatusException ese -> Stream.>builder() + .add(entry("status_code", ese.status().getStatus())) + .add(entry("error.type", String.valueOf(ese.status().getStatus()))) + .build(); + default -> Stream.of(entry("error.type", t.getClass().getSimpleName())); + }; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimer.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimer.java new file mode 100644 index 0000000000000..d43f4954edb52 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimer.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.telemetry; + +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; + +public record InferenceTimer(Instant startTime, Clock clock) { + + public InferenceTimer { + Objects.requireNonNull(startTime); + Objects.requireNonNull(clock); + } + + public static InferenceTimer start() { + return start(Clock.systemUTC()); + } + + public static InferenceTimer start(Clock clock) { + return new InferenceTimer(clock.instant(), clock); + } + + public long elapsedMillis() { + return Duration.between(startTime(), clock().instant()).toMillis(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java index 03613901c7816..732e2cc826570 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettingsTests; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserMlNodeTaskSettings; @@ -29,7 +28,7 @@ public static ModelConfigurations createRandomInstance() { randomAlphaOfLength(6), randomServiceSettings(), randomTaskSettings(taskType), - ChunkingSettingsFeatureFlag.isEnabled() && randomBoolean() ? ChunkingSettingsTests.createRandomChunkingSettings() : null + randomBoolean() ? 
ChunkingSettingsTests.createRandomChunkingSettings() : null ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java new file mode 100644 index 0000000000000..0ed9cbf56b3fa --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -0,0 +1,354 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.action.task.StreamingTaskManager; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.elasticsearch.xpack.inference.telemetry.InferenceStats; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import 
java.util.concurrent.Flow; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isA; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TransportInferenceActionTests extends ESTestCase { + private static final String serviceId = "serviceId"; + private static final TaskType taskType = TaskType.COMPLETION; + private static final String inferenceId = "inferenceEntityId"; + private ModelRegistry modelRegistry; + private InferenceServiceRegistry serviceRegistry; + private InferenceStats inferenceStats; + private StreamingTaskManager streamingTaskManager; + private TransportInferenceAction action; + + @Before + public void setUp() throws Exception { + super.setUp(); + TransportService transportService = mock(); + ActionFilters actionFilters = mock(); + modelRegistry = mock(); + serviceRegistry = mock(); + inferenceStats = new InferenceStats(mock(), mock()); + streamingTaskManager = mock(); + action = new TransportInferenceAction( + transportService, + actionFilters, + modelRegistry, + serviceRegistry, + inferenceStats, + streamingTaskManager + ); + } + + public void testMetricsAfterModelRegistryError() { + var expectedException = new IllegalStateException("hello"); + var expectedError = expectedException.getClass().getSimpleName(); + + doAnswer(ans -> { + ActionListener listener = ans.getArgument(1); + listener.onFailure(expectedException); + return null; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + var listener = doExecute(taskType); + 
verify(listener).onFailure(same(expectedException)); + + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), nullValue()); + assertThat(attributes.get("task_type"), nullValue()); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + private ActionListener doExecute(TaskType taskType) { + return doExecute(taskType, false); + } + + private ActionListener doExecute(TaskType taskType, boolean stream) { + InferenceAction.Request request = mock(); + when(request.getInferenceEntityId()).thenReturn(inferenceId); + when(request.getTaskType()).thenReturn(taskType); + when(request.isStreaming()).thenReturn(stream); + ActionListener listener = mock(); + action.doExecute(mock(), request, listener); + return listener; + } + + public void testMetricsAfterMissingService() { + mockModelRegistry(taskType); + + when(serviceRegistry.getService(any())).thenReturn(Optional.empty()); + + var listener = doExecute(taskType); + + verify(listener).onFailure(assertArg(e -> { + assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]. 
")); + assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + })); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(RestStatus.BAD_REQUEST.getStatus())); + assertThat(attributes.get("error.type"), is(String.valueOf(RestStatus.BAD_REQUEST.getStatus()))); + })); + } + + private void mockModelRegistry(TaskType expectedTaskType) { + var unparsedModel = new UnparsedModel(inferenceId, expectedTaskType, serviceId, Map.of(), Map.of()); + doAnswer(ans -> { + ActionListener listener = ans.getArgument(1); + listener.onResponse(unparsedModel); + return null; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + } + + public void testMetricsAfterUnknownTaskType() { + var modelTaskType = TaskType.RERANK; + var requestTaskType = TaskType.SPARSE_EMBEDDING; + mockModelRegistry(modelTaskType); + when(serviceRegistry.getService(any())).thenReturn(Optional.of(mock())); + + var listener = doExecute(requestTaskType); + + verify(listener).onFailure(assertArg(e -> { + assertThat(e, isA(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is( + "Incompatible task_type, the requested type [" + + requestTaskType + + "] does not match the model type [" + + modelTaskType + + "]" + ) + ); + assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); + })); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(modelTaskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(RestStatus.BAD_REQUEST.getStatus())); + assertThat(attributes.get("error.type"), 
is(String.valueOf(RestStatus.BAD_REQUEST.getStatus()))); + })); + } + + public void testMetricsAfterInferError() { + var expectedException = new IllegalStateException("hello"); + var expectedError = expectedException.getClass().getSimpleName(); + mockService(listener -> listener.onFailure(expectedException)); + + var listener = doExecute(taskType); + + verify(listener).onFailure(same(expectedException)); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testMetricsAfterStreamUnsupported() { + var expectedStatus = RestStatus.METHOD_NOT_ALLOWED; + var expectedError = String.valueOf(expectedStatus.getStatus()); + mockService(l -> {}); + + var listener = doExecute(taskType, true); + + verify(listener).onFailure(assertArg(e -> { + assertThat(e, isA(ElasticsearchStatusException.class)); + var ese = (ElasticsearchStatusException) e; + assertThat(ese.getMessage(), is("Streaming is not allowed for service [" + serviceId + "].")); + assertThat(ese.status(), is(expectedStatus)); + })); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(expectedStatus.getStatus())); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testMetricsAfterInferSuccess() { + mockService(listener -> listener.onResponse(mock())); + + var listener = doExecute(taskType); + + verify(listener).onResponse(any()); + 
verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(200)); + assertThat(attributes.get("error.type"), nullValue()); + })); + } + + public void testMetricsAfterStreamInferSuccess() { + mockStreamResponse(Flow.Subscriber::onComplete); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(200)); + assertThat(attributes.get("error.type"), nullValue()); + })); + } + + public void testMetricsAfterStreamInferFailure() { + var expectedException = new IllegalStateException("hello"); + var expectedError = expectedException.getClass().getSimpleName(); + mockStreamResponse(subscriber -> { + subscriber.subscribe(mock()); + subscriber.onError(expectedException); + }); + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testMetricsAfterStreamCancel() { + var response = mockStreamResponse(s -> s.onSubscribe(mock())); + response.subscribe(new Flow.Subscriber<>() { + @Override + public void onSubscribe(Flow.Subscription subscription) { + subscription.cancel(); + } + + @Override + public void onNext(ChunkedToXContent item) { + + } + + @Override + public void onError(Throwable throwable) { + + } + + @Override + 
public void onComplete() { + + } + }); + + verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> { + assertThat(attributes.get("service"), is(serviceId)); + assertThat(attributes.get("task_type"), is(taskType.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(200)); + assertThat(attributes.get("error.type"), nullValue()); + })); + } + + private Flow.Publisher mockStreamResponse(Consumer> action) { + mockService(true, Set.of(), listener -> { + Flow.Processor taskProcessor = mock(); + doAnswer(innerAns -> { + action.accept(innerAns.getArgument(0)); + return null; + }).when(taskProcessor).subscribe(any()); + when(streamingTaskManager.create(any(), any())).thenReturn(taskProcessor); + var inferenceServiceResults = mock(InferenceServiceResults.class); + when(inferenceServiceResults.publisher()).thenReturn(mock()); + listener.onResponse(inferenceServiceResults); + }); + + var listener = doExecute(taskType, true); + var captor = ArgumentCaptor.forClass(InferenceAction.Response.class); + verify(listener).onResponse(captor.capture()); + assertTrue(captor.getValue().isStreaming()); + assertNotNull(captor.getValue().publisher()); + return captor.getValue().publisher(); + } + + private void mockService(Consumer> listenerAction) { + mockService(false, Set.of(), listenerAction); + } + + private void mockService( + boolean stream, + Set supportedStreamingTasks, + Consumer> listenerAction + ) { + InferenceService service = mock(); + Model model = mockModel(); + when(service.parsePersistedConfigWithSecrets(any(), any(), any(), any())).thenReturn(model); + when(service.name()).thenReturn(serviceId); + + when(service.canStream(any())).thenReturn(stream); + when(service.supportedStreamingTasks()).thenReturn(supportedStreamingTasks); + doAnswer(ans -> { + listenerAction.accept(ans.getArgument(7)); + return null; + }).when(service).infer(any(), any(), any(), anyBoolean(), any(), any(), any(), 
any()); + mockModelAndServiceRegistry(service); + } + + private Model mockModel() { + Model model = mock(); + ModelConfigurations modelConfigurations = mock(); + when(modelConfigurations.getService()).thenReturn(serviceId); + when(model.getConfigurations()).thenReturn(modelConfigurations); + when(model.getTaskType()).thenReturn(taskType); + when(model.getServiceSettings()).thenReturn(mock()); + return model; + } + + private void mockModelAndServiceRegistry(InferenceService service) { + var unparsedModel = new UnparsedModel(inferenceId, taskType, serviceId, Map.of(), Map.of()); + doAnswer(ans -> { + ActionListener listener = ans.getArgument(1); + listener.onResponse(unparsedModel); + return null; + }).when(modelRegistry).getModelWithSecrets(any(), any()); + + when(serviceRegistry.getService(any())).thenReturn(Optional.of(service)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java index 235a3730ce4f6..4a284e0a84ff5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -19,6 +19,12 @@ public class ChunkingSettingsBuilderTests extends ESTestCase { public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); + public void testNullChunkingSettingsMap() { + ChunkingSettings chunkingSettings = ChunkingSettingsBuilder.fromMap(null); + + assertEquals(ChunkingSettingsBuilder.OLD_DEFAULT_SETTINGS, chunkingSettings); + } + public void testEmptyChunkingSettingsMap() { ChunkingSettings chunkingSettings = ChunkingSettingsBuilder.fromMap(Collections.emptyMap()); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index 445d9c68a88aa..aac111c22558e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.alibabacloudsearch; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; @@ -28,7 +27,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -71,7 +69,6 @@ import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; @@ -118,34 +115,7 @@ public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws IOException } } - public void 
testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap( - AlibabaCloudSearchEmbeddingsServiceSettingsTests.getServiceSettingsMap("service_id", "host", "default"), - AlibabaCloudSearchEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ), - modelVerificationListener - ); - } - } - - public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AlibabaCloudSearchEmbeddingsModel.class)); @@ -172,8 +142,7 @@ public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsP } } - public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - 
assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AlibabaCloudSearchEmbeddingsModel.class)); @@ -199,30 +168,7 @@ public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsN } } - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { - var model = service.parsePersistedConfig( - "id", - TaskType.TEXT_EMBEDDING, - getPersistedConfigMap( - AlibabaCloudSearchEmbeddingsServiceSettingsTests.getServiceSettingsMap("service_id", "host", "default"), - AlibabaCloudSearchEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - createRandomChunkingSettingsMap() - ).config() - ); - - assertThat(model, instanceOf(AlibabaCloudSearchEmbeddingsModel.class)); - var embeddingsModel = (AlibabaCloudSearchEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("service_id")); - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getHost(), is("host")); - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getWorkspaceName(), is("default")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws 
IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { var model = service.parsePersistedConfig( "id", @@ -243,8 +189,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSetting } } - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { var model = service.parsePersistedConfig( "id", @@ -264,36 +209,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSetting } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { - var persistedConfig = getPersistedConfigMap( - AlibabaCloudSearchEmbeddingsServiceSettingsTests.getServiceSettingsMap("service_id", "host", "default"), - AlibabaCloudSearchEmbeddingsTaskSettingsTests.getTaskSettingsMap(null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - 
persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(AlibabaCloudSearchEmbeddingsModel.class)); - var embeddingsModel = (AlibabaCloudSearchEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("service_id")); - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getHost(), is("host")); - assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getWorkspaceName(), is("default")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { var persistedConfig = getPersistedConfigMap( AlibabaCloudSearchEmbeddingsServiceSettingsTests.getServiceSettingsMap("service_id", "host", "default"), @@ -318,9 +234,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChun } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = new AlibabaCloudSearchService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool))) { var persistedConfig = 
getPersistedConfigMap( AlibabaCloudSearchEmbeddingsServiceSettingsTests.getServiceSettingsMap("service_id", "host", "default"), @@ -411,31 +325,19 @@ public void doInfer( } } - public void testChunkedInfer_TextEmbeddingBatches() throws IOException { - testChunkedInfer(TaskType.TEXT_EMBEDDING, null); - } - - public void testChunkedInfer_TextEmbeddingChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_TextEmbeddingChunkingSettingsSet() throws IOException { testChunkedInfer(TaskType.TEXT_EMBEDDING, ChunkingSettingsTests.createRandomChunkingSettings()); } - public void testChunkedInfer_TextEmbeddingChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_TextEmbeddingChunkingSettingsNotSet() throws IOException { testChunkedInfer(TaskType.TEXT_EMBEDDING, null); } - public void testChunkedInfer_SparseEmbeddingBatches() throws IOException { - testChunkedInfer(TaskType.SPARSE_EMBEDDING, null); - } - - public void testChunkedInfer_SparseEmbeddingChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_SparseEmbeddingChunkingSettingsSet() throws IOException { testChunkedInfer(TaskType.SPARSE_EMBEDDING, ChunkingSettingsTests.createRandomChunkingSettings()); } - public void testChunkedInfer_SparseEmbeddingChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_SparseEmbeddingChunkingSettingsNotSet() throws IOException { 
testChunkedInfer(TaskType.SPARSE_EMBEDDING, null); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index 6de6b38330ad1..e76fb10c96131 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -522,35 +521,7 @@ public void testParseRequestConfig_MovesModel() throws IOException { } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createAmazonBedrockService()) { - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap( - createEmbeddingsRequestSettingsMap("region", "model", 
"amazontitan", null, null, null, null), - Map.of(), - createRandomChunkingSettingsMap(), - getAmazonBedrockSecretSettingsMap("access", "secret") - ), - modelVerificationListener - ); - } - } - - public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAmazonBedrockService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); @@ -579,9 +550,7 @@ public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChun } } - public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createAmazonBedrockService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); @@ -658,38 +627,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddings } } - public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createAmazonBedrockService()) { - var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", 
"amazontitan", null, false, null, null); - var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); - - var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); - - var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); - assertThat(settings.region(), is("region")); - assertThat(settings.modelId(), is("model")); - assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); - assertNull(model.getConfigurations().getChunkingSettings()); - var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); - assertThat(secretSettings.accessKey.toString(), is("access")); - assertThat(secretSettings.secretKey.toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); @@ -721,11 +659,8 @@ public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddings } } - public - void - testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is 
enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided() + throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); @@ -936,38 +871,7 @@ public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModel() thr } } - public - void - testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createAmazonBedrockService()) { - var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); - var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); - - var persistedConfig = getPersistedConfigMap( - settingsMap, - new HashMap(Map.of()), - createRandomChunkingSettingsMap(), - secretSettingsMap - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); - - var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); - assertThat(settings.region(), is("region")); - assertThat(settings.modelId(), is("model")); - assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); - assertNull(model.getConfigurations().getChunkingSettings()); - assertNull(model.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature 
flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); @@ -992,9 +896,7 @@ public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenCh } } - public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); @@ -1523,21 +1425,7 @@ public void testSupportsStreaming() throws IOException { } } - public void testChunkedInfer_CallsInfer_ConvertsFloatResponse_ForEmbeddings() throws IOException { - var model = AmazonBedrockEmbeddingsModelTests.createModel( - "id", - "region", - "model", - AmazonBedrockProvider.AMAZONTITAN, - "access", - "secret" - ); - - testChunkedInfer(model); - } - - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { var model = AmazonBedrockEmbeddingsModelTests.createModel( "id", "region", @@ -1551,8 +1439,7 @@ public void 
testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws I testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { var model = AmazonBedrockEmbeddingsModelTests.createModel( "id", "region", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index ec5eef4428e7d..76ea7a5bde5ca 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -54,6 +53,7 @@ import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsModelTests; import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsServiceSettingsTests; import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsTaskSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; import 
org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -137,34 +137,7 @@ public void testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModel() throw } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - var serviceSettings = getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", null, null, null, null); - - var config = getRequestConfigMap( - serviceSettings, - getEmbeddingsTaskSettingsMap("user"), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, modelVerificationListener); - } - } - - public void testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AzureAiStudioEmbeddingsModel.class)); @@ -192,9 +165,7 @@ public void testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModelWhenChun } } - public void 
testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnAzureAiStudioEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AzureAiStudioEmbeddingsModel.class)); @@ -558,37 +529,7 @@ public void testParsePersistedConfig_CreatesAnAzureAiStudioEmbeddingsModel() thr } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - var config = getPersistedConfigMap( - getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), - getEmbeddingsTaskSettingsMap("user"), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var model = service.parsePersistedConfigWithSecrets("id", TaskType.TEXT_EMBEDDING, config.config(), config.secrets()); - - assertThat(model, instanceOf(AzureAiStudioEmbeddingsModel.class)); - - var embeddingsModel = (AzureAiStudioEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().target(), is("http://target.local")); - assertThat(embeddingsModel.getServiceSettings().provider(), is(AzureAiStudioProvider.OPENAI)); - assertThat(embeddingsModel.getServiceSettings().endpointType(), is(AzureAiStudioEndpointType.TOKEN)); - assertThat(embeddingsModel.getServiceSettings().dimensions(), is(1024)); - assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(true)); - 
assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - assertThat(embeddingsModel.getTaskSettings().user(), is("user")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), @@ -614,9 +555,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChun } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), @@ -831,35 +770,7 @@ public void testParsePersistedConfig_WithoutSecretsCreatesEmbeddingsModel() thro } } - public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWithoutChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - var config = 
getPersistedConfigMap( - getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), - getEmbeddingsTaskSettingsMap("user"), - createRandomChunkingSettingsMap(), - Map.of() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, config.config()); - - assertThat(model, instanceOf(AzureAiStudioEmbeddingsModel.class)); - - var embeddingsModel = (AzureAiStudioEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().target(), is("http://target.local")); - assertThat(embeddingsModel.getServiceSettings().provider(), is(AzureAiStudioProvider.OPENAI)); - assertThat(embeddingsModel.getServiceSettings().endpointType(), is(AzureAiStudioEndpointType.TOKEN)); - assertThat(embeddingsModel.getServiceSettings().dimensions(), is(1024)); - assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(true)); - assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); - assertThat(embeddingsModel.getTaskSettings().user(), is("user")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), @@ -884,8 +795,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSetting } } - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("http://target.local", "openai", "token", 1024, true, 512, null), @@ -1064,6 +974,112 @@ public void testCheckModelConfig_WorksForChatCompletionsModel() throws IOExcepti } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new AzureAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var model = AzureAiStudioChatCompletionModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomFrom(AzureAiStudioProvider.values()), + randomFrom(AzureAiStudioEndpointType.values()), + randomAlphaOfLength(10) + ); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new AzureAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = AzureAiStudioEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + 
randomFrom(AzureAiStudioProvider.values()), + randomFrom(AzureAiStudioEndpointType.values()), + randomAlphaOfLength(10), + randomNonNegativeInt(), + randomBoolean(), + randomNonNegativeInt(), + similarityMeasure, + randomAlphaOfLength(10), + RateLimitSettingsTests.createRandom() + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + + public void testUpdateModelWithChatCompletionDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new AzureAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var model = AzureAiStudioEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomFrom(AzureAiStudioProvider.values()), + randomFrom(AzureAiStudioEndpointType.values()), + randomAlphaOfLength(10), + randomNonNegativeInt(), + randomBoolean(), + randomNonNegativeInt(), + randomFrom(SimilarityMeasure.values()), + randomAlphaOfLength(10), + RateLimitSettingsTests.createRandom() + ); + assertThrows(ElasticsearchStatusException.class, () -> { service.updateModelWithChatCompletionDetails(model); }); + } + } + + public void testUpdateModelWithChatCompletionDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithChatCompletionDetails_Successful(null); + } + + public void testUpdateModelWithChatCompletionDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithChatCompletionDetails_Successful(randomNonNegativeInt()); + } + + private void testUpdateModelWithChatCompletionDetails_Successful(Integer maxNewTokens) 
throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new AzureAiStudioService(senderFactory, createWithEmptySettings(threadPool))) { + var model = AzureAiStudioChatCompletionModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomFrom(AzureAiStudioProvider.values()), + randomFrom(AzureAiStudioEndpointType.values()), + randomAlphaOfLength(10), + randomDouble(), + randomDouble(), + randomBoolean(), + maxNewTokens, + RateLimitSettingsTests.createRandom() + ); + + Model updatedModel = service.updateModelWithChatCompletionDetails(model); + assertThat(updatedModel, instanceOf(AzureAiStudioChatCompletionModel.class)); + AzureAiStudioChatCompletionTaskSettings updatedTaskSettings = (AzureAiStudioChatCompletionTaskSettings) updatedModel + .getTaskSettings(); + Integer expectedMaxNewTokens = maxNewTokens == null + ? AzureAiStudioChatCompletionTaskSettings.DEFAULT_MAX_NEW_TOKENS + : maxNewTokens; + assertEquals(expectedMaxNewTokens, updatedTaskSettings.maxNewTokens()); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotAzureAiStudioModel() throws IOException { var sender = mock(Sender.class); @@ -1100,25 +1116,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureAiStudioModel() throws IOExc verifyNoMoreInteractions(sender); } - public void testChunkedInfer() throws IOException { - var model = AzureAiStudioEmbeddingsModelTests.createModel( - "id", - getUrl(webServer), - AzureAiStudioProvider.OPENAI, - AzureAiStudioEndpointType.TOKEN, - "apikey", - null, - false, - null, - null, - "user", - null - ); - testChunkedInfer(model); - } - - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { var model = 
AzureAiStudioEmbeddingsModelTests.createModel( "id", getUrl(webServer), @@ -1136,8 +1134,7 @@ public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws I testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { var model = AzureAiStudioEmbeddingsModelTests.createModel( "id", getUrl(webServer), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 41fd7d099d416..40f8b7e0977e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; @@ -135,32 +134,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - 
try (var service = createAzureOpenAiService()) { - var config = getRequestConfigMap( - getRequestAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), - getAzureOpenAiRequestTaskSettingsMap("user"), - createRandomChunkingSettingsMap(), - getAzureOpenAiSecretSettingsMap("secret", null) - ); - - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, modelVerificationListener); - } - } - - public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAzureOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); @@ -188,9 +162,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSet } } - public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createAzureOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, 
instanceOf(AzureOpenAiEmbeddingsModel.class)); @@ -393,43 +365,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnAzureOpenAiEmbeddingsMo } } - public - void - testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createAzureOpenAiService()) { - var persistedConfig = getPersistedConfigMap( - getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", 100, 512), - getAzureOpenAiRequestTaskSettingsMap("user"), - createRandomChunkingSettingsMap(), - getAzureOpenAiSecretSettingsMap("secret", null) - ); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); - - var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); - assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); - assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); - assertThat(embeddingsModel.getServiceSettings().dimensions(), is(100)); - assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); - assertThat(embeddingsModel.getTaskSettings().user(), is("user")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", 100, 512), @@ -459,9 +395,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWh } } - public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", 100, 512), @@ -686,33 +620,7 @@ public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModel() throw } } - public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createAzureOpenAiService()) { - var persistedConfig = getPersistedConfigMap( - getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), - getAzureOpenAiRequestTaskSettingsMap("user"), - createRandomChunkingSettingsMap() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); - - var embeddingsModel = 
(AzureOpenAiEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); - assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); - assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); - assertThat(embeddingsModel.getTaskSettings().user(), is("user")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertNull(embeddingsModel.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), @@ -734,9 +642,7 @@ public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWhenChun } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), @@ -1326,14 +1232,7 @@ public void testInfer_UnauthorisedResponse() throws IOException, URISyntaxExcept } } - public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOException, URISyntaxException 
{ - var model = AzureOpenAiEmbeddingsModelTests.createModel("resource", "deployment", "apiversion", "user", "apikey", null, "id"); - - testChunkedInfer(model); - } - - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException, URISyntaxException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException, URISyntaxException { var model = AzureOpenAiEmbeddingsModelTests.createModel( "resource", "deployment", @@ -1348,8 +1247,7 @@ public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws I testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException, URISyntaxException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException, URISyntaxException { var model = AzureOpenAiEmbeddingsModelTests.createModel("resource", "deployment", "apiversion", "user", null, "apikey", null, "id"); testChunkedInfer(model); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 3ce06df1f7fdb..725879e76efc1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import 
org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -141,29 +140,7 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createCohereService()) { - var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null); - - var config = getRequestConfigMap( - serviceSettings, - getTaskSettingsMap(null, null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var failureListener = ActionListener.wrap((model) -> fail("Model parsing should have failed"), e -> { - MatcherAssert.assertThat(e, instanceOf(ElasticsearchStatusException.class)); - MatcherAssert.assertThat(e.getMessage(), containsString("Model configuration contains settings")); - }); - service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener); - } - } - - public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createCohereService()) { ActionListener modelListener = ActionListener.wrap(model -> { MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); @@ -196,9 +173,7 @@ public void 
testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSett } } - public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createCohereService()) { ActionListener modelListener = ActionListener.wrap(model -> { MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); @@ -405,38 +380,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() } } - public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createCohereService()) { - var persistedConfig = getPersistedConfigMap( - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), - getTaskSettingsMap(null, null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); - - var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); - MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); - 
assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), @@ -463,9 +407,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhe } } - public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), @@ -693,32 +635,7 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOEx } } - public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createCohereService()) { - var persistedConfig = getPersistedConfigMap( - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", 
"model", null), - getTaskSettingsMap(null, CohereTruncation.NONE), - createRandomChunkingSettingsMap() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); - - var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); - MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertNull(embeddingsModel.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), @@ -739,9 +656,7 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSe } } - public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( 
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), @@ -1417,22 +1332,7 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec } } - public void testChunkedInfer_BatchesCalls() throws IOException { - var model = CohereEmbeddingsModelTests.createModel( - getUrl(webServer), - "secret", - new CohereEmbeddingsTaskSettings(null, null), - 1024, - 1024, - "model", - null - ); - - testChunkedInfer(model); - } - - public void testChunkedInfer_BatchesCallsChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_BatchesCallsChunkingSettingsSet() throws IOException { var model = CohereEmbeddingsModelTests.createModel( getUrl(webServer), "secret", @@ -1447,8 +1347,7 @@ public void testChunkedInfer_BatchesCallsChunkingSettingsSetAndFeatureFlagEnable testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { var model = CohereEmbeddingsModelTests.createModel( getUrl(webServer), "secret", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index cad33b56ce235..89a27a921cbea 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -41,7 +41,6 @@ import 
org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.action.util.QueryPage; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; @@ -272,37 +271,6 @@ public void testParseRequestConfig_E5() { } { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, - 1, - ElasticsearchInternalServiceSettings.NUM_THREADS, - 4, - ElasticsearchInternalServiceSettings.MODEL_ID, - MULTILINGUAL_E5_SMALL_MODEL_ID - ) - ) - ); - settings.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - - service.parseRequestConfig(randomInferenceEntityId, TaskType.TEXT_EMBEDDING, settings, modelVerificationListener); - } - - { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( @@ -331,7 +299,6 @@ public void 
testParseRequestConfig_E5() { } { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); var service = createService(mock(Client.class), BaseElasticsearchInternalService.PreferredModelVariant.PLATFORM_AGNOSTIC); var settings = new HashMap(); settings.put( @@ -460,40 +427,6 @@ public void testParseRequestConfig_elser() { } { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - Client mockClient = mock(Client.class); - when(mockClient.threadPool()).thenReturn(threadPool); - var service = createService(mockClient); - var config = new HashMap(); - config.put(ModelConfigurations.SERVICE, OLD_ELSER_SERVICE_NAME); - config.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, - 1, - ElasticsearchInternalServiceSettings.NUM_THREADS, - 4, - ElasticsearchInternalServiceSettings.MODEL_ID, - ElserModels.ELSER_V2_MODEL - ) - ) - ); - config.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - - service.parseRequestConfig(randomInferenceEntityId, TaskType.SPARSE_EMBEDDING, config, modelVerificationListener); - } - - { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); Client mockClient = mock(Client.class); when(mockClient.threadPool()).thenReturn(threadPool); var service = createService(mockClient); @@ -531,7 +464,6 @@ public void testParseRequestConfig_elser() { } { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); Client mockClient = mock(Client.class); when(mockClient.threadPool()).thenReturn(threadPool); var service = createService(mockClient); @@ -661,55 +593,11 @@ public void testParseRequestConfig_SparseEmbeddingWithoutChunkingSettings() { testParseRequestConfig_SparseEmbedding(false, Optional.empty()); } - @SuppressWarnings("unchecked") - public void testParseRequestConfig_SparseEmbeddingWithChunkingSettingsDisabledAndProvided() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - var client = mock(Client.class); - doAnswer(invocation -> { - var listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse( - new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) - ); - return null; - }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); - - when(client.threadPool()).thenReturn(threadPool); - - var service = createService(client); - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, - 1, - ElasticsearchInternalServiceSettings.NUM_THREADS, - 4, - ElasticsearchInternalServiceSettings.MODEL_ID, - "foo" - ) - ) - ); - settings.put(ModelConfigurations.CHUNKING_SETTINGS, createRandomChunkingSettingsMap()); - - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig(randomInferenceEntityId, TaskType.SPARSE_EMBEDDING, settings, modelVerificationListener); - } - - public void 
testParseRequestConfig_SparseEmbeddingWithChunkingSettingsEnabledAndProvided() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_SparseEmbeddingWithChunkingSettingsProvided() { testParseRequestConfig_SparseEmbedding(true, Optional.of(createRandomChunkingSettingsMap())); } - public void testParseRequestConfig_SparseEmbeddingWithChunkingSettingsEnabledAndNotProvided() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_SparseEmbeddingWithChunkingSettingsNotProvided() { testParseRequestConfig_SparseEmbedding(true, Optional.empty()); } @@ -948,8 +836,7 @@ public void testChunkInfer_E5WithNullChunkingSettings() { testChunkInfer_e5(null); } - public void testChunkInfer_E5ChunkingSettingsSetAndFeatureFlagEnabled() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkInfer_E5ChunkingSettingsSet() { testChunkInfer_e5(ChunkingSettingsTests.createRandomChunkingSettings()); } @@ -1020,8 +907,7 @@ public void testChunkInfer_SparseWithNullChunkingSettings() { testChunkInfer_Sparse(null); } - public void testChunkInfer_SparseWithChunkingSettingsSetAndFeatureFlagEnabled() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkInfer_SparseWithChunkingSettingsSet() { testChunkInfer_Sparse(ChunkingSettingsTests.createRandomChunkingSettings()); } @@ -1087,8 +973,7 @@ public void testChunkInfer_ElserWithNullChunkingSettings() { testChunkInfer_Elser(null); } - public void testChunkInfer_ElserWithChunkingSettingsSetAndFeatureFlagEnabled() { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void 
testChunkInfer_ElserWithChunkingSettingsSet() { testChunkInfer_Elser(ChunkingSettingsTests.createRandomChunkingSettings()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index e94a3f5d727cf..bc8020d8d88fe 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -163,37 +162,7 @@ public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModel() throw } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - var apiKey = "apiKey"; - var modelId = "model"; - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createGoogleAiStudioService()) { - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, Matchers.instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - 
} - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), - new HashMap<>(Map.of()), - createRandomChunkingSettingsMap(), - getSecretSettingsMap(apiKey) - ), - modelVerificationListener - ); - } - } - - public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { var apiKey = "apiKey"; var modelId = "model"; @@ -221,9 +190,7 @@ public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChun } } - public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var apiKey = "apiKey"; var modelId = "model"; @@ -394,40 +361,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAGoogleAiStudioEmbeddings } } - public void testParsePersistedConfigWithSecrets_CreatesAGoogleAiStudioEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - var modelId = "model"; - var apiKey = "apiKey"; - - try (var service = createGoogleAiStudioService()) { - var persistedConfig = getPersistedConfigMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), - getTaskSettingsMapEmpty(), - createRandomChunkingSettingsMap(), - getSecretSettingsMap(apiKey) - ); 
- - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(GoogleAiStudioEmbeddingsModel.class)); - - var embeddingsModel = (GoogleAiStudioEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); - assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { var modelId = "model"; var apiKey = "apiKey"; @@ -456,9 +390,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAGoogleAiStudioEmbeddings } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var modelId = "model"; var apiKey = "apiKey"; @@ -617,33 +549,7 @@ public void testParsePersistedConfig_CreatesAGoogleAiStudioCompletionModel() thr } } - public void testParsePersistedConfig_CreatesAGoogleAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", 
ChunkingSettingsFeatureFlag.isEnabled() == false); - var modelId = "model"; - - try (var service = createGoogleAiStudioService()) { - var persistedConfig = getPersistedConfigMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), - getTaskSettingsMapEmpty(), - createRandomChunkingSettingsMap() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(GoogleAiStudioEmbeddingsModel.class)); - - var embeddingsModel = (GoogleAiStudioEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); - assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertNull(embeddingsModel.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { var modelId = "model"; try (var service = createGoogleAiStudioService()) { @@ -665,9 +571,7 @@ public void testParsePersistedConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenCh } } - public void testParsePersistedConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAGoogleAiStudioEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var modelId = "model"; try (var service = createGoogleAiStudioService()) { @@ -921,16 +825,15 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { } } - public 
void testChunkedInfer_Batches() throws IOException { + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { var modelId = "modelId"; var apiKey = "apiKey"; - var model = GoogleAiStudioEmbeddingsModelTests.createModel(modelId, apiKey, getUrl(webServer)); + var model = GoogleAiStudioEmbeddingsModelTests.createModel(modelId, null, apiKey, getUrl(webServer)); testChunkedInfer(modelId, apiKey, model); } - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { var modelId = "modelId"; var apiKey = "apiKey"; var model = GoogleAiStudioEmbeddingsModelTests.createModel(modelId, createRandomChunkingSettings(), apiKey, getUrl(webServer)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java index da38cdc763db4..6f28301078853 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -49,7 +48,6 @@ import static 
org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -119,38 +117,7 @@ public void testParseRequestConfig_CreatesGoogleVertexAiEmbeddingsModel() throws } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createGoogleVertexAiService()) { - var config = getRequestConfigMap( - new HashMap<>( - Map.of( - ServiceFields.MODEL_ID, - "model", - GoogleVertexAiServiceFields.LOCATION, - "location", - GoogleVertexAiServiceFields.PROJECT_ID, - "project" - ) - ), - getTaskSettingsMap(true), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("{}") - ); - - var failureListener = ActionListener.wrap(model -> fail("Expected exception, but got model: " + model), exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - }); - - service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener); - } - } - - public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws 
IOException { var projectId = "project"; var location = "location"; var modelId = "model"; @@ -196,9 +163,7 @@ public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChun } } - public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var projectId = "project"; var location = "location"; var modelId = "model"; @@ -457,61 +422,7 @@ public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiEmbeddingsM } } - public void testParsePersistedConfigWithSecrets_CreatesAGoogleVertexAiEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - var projectId = "project"; - var location = "location"; - var modelId = "model"; - var autoTruncate = true; - var serviceAccountJson = """ - { - "some json" - } - """; - - try (var service = createGoogleVertexAiService()) { - var persistedConfig = getPersistedConfigMap( - new HashMap<>( - Map.of( - ServiceFields.MODEL_ID, - modelId, - GoogleVertexAiServiceFields.LOCATION, - location, - GoogleVertexAiServiceFields.PROJECT_ID, - projectId, - GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER, - true - ) - ), - getTaskSettingsMap(autoTruncate), - createRandomChunkingSettingsMap(), - getSecretSettingsMap(serviceAccountJson) - ); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class)); - - var embeddingsModel = (GoogleVertexAiEmbeddingsModel) 
model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); - assertThat(embeddingsModel.getServiceSettings().location(), is(location)); - assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); - assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE)); - assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate))); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson)); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { var projectId = "project"; var location = "location"; var modelId = "model"; @@ -561,9 +472,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAGoogleVertexAiEmbeddings } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var projectId = "project"; var location = "location"; var modelId = "model"; @@ -841,49 +750,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists } } - public void testParsePersistedConfig_CreatesAGoogleVertexAiEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 
'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - var projectId = "project"; - var location = "location"; - var modelId = "model"; - var autoTruncate = true; - - try (var service = createGoogleVertexAiService()) { - var persistedConfig = getPersistedConfigMap( - new HashMap<>( - Map.of( - ServiceFields.MODEL_ID, - modelId, - GoogleVertexAiServiceFields.LOCATION, - location, - GoogleVertexAiServiceFields.PROJECT_ID, - projectId, - GoogleVertexAiEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER, - true - ) - ), - getTaskSettingsMap(autoTruncate), - createRandomChunkingSettingsMap() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(GoogleVertexAiEmbeddingsModel.class)); - - var embeddingsModel = (GoogleVertexAiEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); - assertThat(embeddingsModel.getServiceSettings().location(), is(location)); - assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); - assertThat(embeddingsModel.getServiceSettings().dimensionsSetByUser(), is(Boolean.TRUE)); - assertThat(embeddingsModel.getTaskSettings(), is(new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate))); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - } - } - - public void testParsePersistedConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { var projectId = "project"; var location = "location"; var modelId = "model"; @@ -921,8 +788,7 @@ public void testParsePersistedConfig_CreatesAGoogleVertexAiEmbeddingsModelWhenCh } } - public 
void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { var projectId = "project"; var location = "location"; var modelId = "model"; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index a683d6e3cb051..0ff4bd805ea36 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; @@ -73,7 +72,6 @@ import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -119,32 +117,7 
@@ public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws IOException } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createHuggingFaceService()) { - var config = getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); - config.put("extra_key", "value"); - - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap(getServiceSettingsMap("url"), createRandomChunkingSettingsMap(), getSecretSettingsMap("secret")), - modelVerificationListener - ); - } - } - - public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createHuggingFaceService()) { ActionListener modelVerificationActionListener = ActionListener.wrap((model) -> { assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); @@ -164,8 +137,7 @@ public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsP } } - public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createHuggingFaceService()) { ActionListener modelVerificationActionListener = ActionListener.wrap((model) -> { assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); @@ -287,36 +259,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModel() throw } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap( - getServiceSettingsMap("url"), - new HashMap<>(), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); - - var embeddingsModel = (HuggingFaceEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createHuggingFaceService()) { var persistedConfig = getPersistedConfigMap( 
getServiceSettingsMap("url"), @@ -341,9 +284,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChun } } - public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createHuggingFaceService()) { var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), getSecretSettingsMap("secret")); @@ -502,25 +443,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModel() throws IOExcepti } } - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), createRandomChunkingSettingsMap()); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); - - var embeddingsModel = (HuggingFaceEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertNull(embeddingsModel.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void 
testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createHuggingFaceService()) { var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), createRandomChunkingSettingsMap()); @@ -535,8 +458,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSetting } } - public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createHuggingFaceService()) { var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index d9075b7988368..71e9eac9a6635 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.ModelConfigurationsTests; @@ -128,35 +127,7 @@ public void testParseRequestConfig_CreatesAMistralEmbeddingsModel() throws IOExc } } - public void 
testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap( - getEmbeddingsServiceSettingsMap("mistral-embed", null, null, null), - getEmbeddingsTaskSettingsMap(), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ), - modelVerificationListener - ); - } - } - - public void testParseRequestConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(MistralEmbeddingsModel.class)); @@ -182,9 +153,7 @@ public void testParseRequestConfig_CreatesAMistralEmbeddingsModelWhenChunkingSet } } - public void testParseRequestConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var 
service = createService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(MistralEmbeddingsModel.class)); @@ -328,32 +297,7 @@ public void testParsePersistedConfig_CreatesAMistralEmbeddingsModel() throws IOE } } - public void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - var config = getPersistedConfigMap( - getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), - getEmbeddingsTaskSettingsMap(), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var model = service.parsePersistedConfigWithSecrets("id", TaskType.TEXT_EMBEDDING, config.config(), config.secrets()); - - assertThat(model, instanceOf(MistralEmbeddingsModel.class)); - - var embeddingsModel = (MistralEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is("mistral-embed")); - assertThat(embeddingsModel.getServiceSettings().dimensions(), is(1024)); - assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), @@ -375,9 +319,7 @@ public 
void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWhenChunkingS } } - public void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAMistralEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), @@ -519,32 +461,7 @@ public void testParsePersistedConfig_WithoutSecretsCreatesEmbeddingsModel() thro } } - public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createService()) { - var config = getPersistedConfigMap( - getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), - getEmbeddingsTaskSettingsMap(), - createRandomChunkingSettingsMap(), - Map.of() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, config.config()); - - assertThat(model, instanceOf(MistralEmbeddingsModel.class)); - - var embeddingsModel = (MistralEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().modelId(), is("mistral-embed")); - assertThat(embeddingsModel.getServiceSettings().dimensions(), is(1024)); - assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - } - } - - public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), @@ -565,9 +482,7 @@ public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWhenC } } - public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_WithoutSecretsCreatesAnEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createService()) { var config = getPersistedConfigMap( getEmbeddingsServiceSettingsMap("mistral-embed", 1024, 512, null), @@ -693,15 +608,14 @@ public void testInfer_ThrowsErrorWhenModelIsNotMistralEmbeddingsModel() throws I verifyNoMoreInteractions(sender); } - public void testChunkedInfer_Embeddings_CallsInfer_ConvertsFloatResponse() throws IOException { - var model = MistralEmbeddingModelTests.createModel("id", "mistral-embed", "apikey", null, null, null, null); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { + var model = MistralEmbeddingModelTests.createModel("id", "mistral-embed", null, "apikey", null, null, null, null); model.setURI(getUrl(webServer)); testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { var model = MistralEmbeddingModelTests.createModel( "id", "mistral-embed", diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 91479b0d18bdb..509a1f8a3d010 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; @@ -346,35 +345,7 @@ public void testParseRequestConfig_MovesModel() throws IOException { } } - public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createOpenAiService()) { - ActionListener modelVerificationListener = ActionListener.wrap( - model -> fail("Expected exception, but got model: " + model), - exception -> { - assertThat(exception, instanceOf(ElasticsearchStatusException.class)); - assertThat(exception.getMessage(), containsString("Model configuration contains settings")); - } - ); - - service.parseRequestConfig( - "id", - TaskType.TEXT_EMBEDDING, - getRequestConfigMap( - getServiceSettingsMap("model", null, null), - getTaskSettingsMap(null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ), - modelVerificationListener - ); - } 
- } - - public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); @@ -402,9 +373,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSet } } - public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); @@ -504,39 +473,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWi } } - public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createOpenAiService()) { - var persistedConfig = getPersistedConfigMap( - getServiceSettingsMap("model", null, null, null, null, true), - getTaskSettingsMap(null), - createRandomChunkingSettingsMap(), - getSecretSettingsMap("secret") - ); - - var model = service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - 
persistedConfig.config(), - persistedConfig.secrets() - ); - - assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - - var embeddingsModel = (OpenAiEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().uri()); - assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getServiceSettings().modelId(), is("model")); - assertNull(embeddingsModel.getTaskSettings().user()); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); - } - } - - public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createOpenAiService()) { var persistedConfig = getPersistedConfigMap( getServiceSettingsMap("model", null, null, null, null, true), @@ -564,9 +501,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWh } } - public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createOpenAiService()) { var persistedConfig = getPersistedConfigMap( getServiceSettingsMap("model", null, null, null, null, true), @@ -788,33 +723,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUr } } - public void 
testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); - try (var service = createOpenAiService()) { - var persistedConfig = getPersistedConfigMap( - getServiceSettingsMap("model", null, null, null, null, true), - getTaskSettingsMap(null), - createRandomChunkingSettingsMap() - ); - - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - - assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - - var embeddingsModel = (OpenAiEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().uri()); - assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getServiceSettings().modelId(), is("model")); - assertNull(embeddingsModel.getTaskSettings().user()); - assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); - assertNull(embeddingsModel.getSecretSettings()); - } - } - - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { try (var service = createOpenAiService()) { var persistedConfig = getPersistedConfigMap( getServiceSettingsMap("model", null, null, null, null, true), @@ -836,9 +745,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingS } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() - throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", 
ChunkingSettingsFeatureFlag.isEnabled()); + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { try (var service = createOpenAiService()) { var persistedConfig = getPersistedConfigMap( getServiceSettingsMap("model", null, null, null, null, true), @@ -1589,13 +1496,7 @@ public void testMoveModelFromTaskToServiceSettings_AlreadyMoved() { assertEquals("model", serviceSettings.get(ServiceFields.MODEL_ID)); } - public void testChunkedInfer_Batches() throws IOException { - var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); - testChunkedInfer(model); - } - - public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel( getUrl(webServer), "org", @@ -1608,8 +1509,7 @@ public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws I testChunkedInfer(model); } - public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { - assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", (ChunkingSettings) null); testChunkedInfer(model); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidatorTests.java new file mode 100644 index 0000000000000..89ab07d25e83d --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ChatCompletionModelValidatorTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.validation; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import org.mockito.Mock; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.openMocks; + +public class ChatCompletionModelValidatorTests extends ESTestCase { + @Mock + private ServiceIntegrationValidator mockServiceIntegrationValidator; + @Mock + private InferenceService mockInferenceService; + @Mock + private InferenceServiceResults mockInferenceServiceResults; + @Mock + private Model mockModel; + @Mock + private ActionListener mockActionListener; + + private ChatCompletionModelValidator underTest; + + @Before + public void setup() { + openMocks(this); + + underTest = new ChatCompletionModelValidator(mockServiceIntegrationValidator); + } + + public void testValidate_ServiceIntegrationValidatorThrowsException() { + doThrow(ElasticsearchStatusException.class).when(mockServiceIntegrationValidator) + .validate(eq(mockInferenceService), eq(mockModel), 
any()); + + assertThrows( + ElasticsearchStatusException.class, + () -> { underTest.validate(mockInferenceService, mockModel, mockActionListener); } + ); + + verify(mockServiceIntegrationValidator).validate(eq(mockInferenceService), eq(mockModel), any()); + verify(mockActionListener).delegateFailureAndWrap(any()); + verifyNoMoreInteractions( + mockServiceIntegrationValidator, + mockInferenceService, + mockInferenceServiceResults, + mockModel, + mockActionListener + ); + } + + public void testValidate_ChatCompletionDetailsUpdated() { + when(mockActionListener.delegateFailureAndWrap(any())).thenCallRealMethod(); + when(mockInferenceService.updateModelWithChatCompletionDetails(mockModel)).thenReturn(mockModel); + doAnswer(ans -> { + ActionListener responseListener = ans.getArgument(2); + responseListener.onResponse(mockInferenceServiceResults); + return null; + }).when(mockServiceIntegrationValidator).validate(eq(mockInferenceService), eq(mockModel), any()); + + underTest.validate(mockInferenceService, mockModel, mockActionListener); + + verify(mockServiceIntegrationValidator).validate(eq(mockInferenceService), eq(mockModel), any()); + verify(mockActionListener).delegateFailureAndWrap(any()); + verify(mockActionListener).onResponse(mockModel); + verify(mockInferenceService).updateModelWithChatCompletionDetails(mockModel); + verifyNoMoreInteractions( + mockServiceIntegrationValidator, + mockInferenceService, + mockInferenceServiceResults, + mockModel, + mockActionListener + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java index c534fea8aeb3e..0153113be75d9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java @@ -34,7 +34,7 @@ private Map> taskTypeToModelValidatorC TaskType.RERANK, SimpleModelValidator.class, TaskType.COMPLETION, - SimpleModelValidator.class, + ChatCompletionModelValidator.class, TaskType.ANY, SimpleModelValidator.class ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java deleted file mode 100644 index 1a5aba5f89ad2..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/ApmInferenceStatsTests.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.telemetry; - -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.MeterRegistry; -import org.elasticsearch.test.ESTestCase; - -import java.util.Map; - -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class ApmInferenceStatsTests extends ESTestCase { - - public void testRecordWithModel() { - var longCounter = mock(LongCounter.class); - - var stats = new ApmInferenceStats(longCounter); - - stats.incrementRequestCount(model("service", TaskType.ANY, "modelId")); - - verify(longCounter).incrementBy( - eq(1L), - eq(Map.of("service", "service", "task_type", TaskType.ANY.toString(), "model_id", "modelId")) - ); - } - - public void testRecordWithoutModel() { - var longCounter = mock(LongCounter.class); - - var stats = new ApmInferenceStats(longCounter); - - stats.incrementRequestCount(model("service", TaskType.ANY, null)); - - verify(longCounter).incrementBy(eq(1L), eq(Map.of("service", "service", "task_type", TaskType.ANY.toString()))); - } - - public void testCreation() { - assertNotNull(ApmInferenceStats.create(MeterRegistry.NOOP)); - } - - private Model model(String service, TaskType taskType, String modelId) { - var configuration = mock(ModelConfigurations.class); - when(configuration.getService()).thenReturn(service); - var settings = mock(ServiceSettings.class); - if (modelId != null) { - when(settings.modelId()).thenReturn(modelId); - } - - var model = mock(Model.class); - when(model.getTaskType()).thenReturn(taskType); - when(model.getConfigurations()).thenReturn(configuration); - when(model.getServiceSettings()).thenReturn(settings); - - 
return model; - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceStatsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceStatsTests.java new file mode 100644 index 0000000000000..d9327295ba5fa --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceStatsTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.telemetry; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.inference.UnparsedModel; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.modelAttributes; +import static org.elasticsearch.xpack.inference.telemetry.InferenceStats.responseAttributes; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class InferenceStatsTests extends ESTestCase { + + public void testRecordWithModel() { + var longCounter = 
mock(LongCounter.class); + var stats = new InferenceStats(longCounter, mock()); + + stats.requestCount().incrementBy(1, modelAttributes(model("service", TaskType.ANY, "modelId"))); + + verify(longCounter).incrementBy( + eq(1L), + eq(Map.of("service", "service", "task_type", TaskType.ANY.toString(), "model_id", "modelId")) + ); + } + + public void testRecordWithoutModel() { + var longCounter = mock(LongCounter.class); + var stats = new InferenceStats(longCounter, mock()); + + stats.requestCount().incrementBy(1, modelAttributes(model("service", TaskType.ANY, null))); + + verify(longCounter).incrementBy(eq(1L), eq(Map.of("service", "service", "task_type", TaskType.ANY.toString()))); + } + + public void testCreation() { + assertNotNull(InferenceStats.create(MeterRegistry.NOOP)); + } + + public void testRecordDurationWithoutError() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + + stats.inferenceDuration().record(expectedLong, responseAttributes(model("service", TaskType.ANY, "modelId"), null)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), is("service")); + assertThat(attributes.get("task_type"), is(TaskType.ANY.toString())); + assertThat(attributes.get("model_id"), is("modelId")); + assertThat(attributes.get("status_code"), is(200)); + assertThat(attributes.get("error.type"), nullValue()); + })); + } + + /** + * "If response status code was sent or received and status indicates an error according to HTTP span status definition, + * error.type SHOULD be set to the status code number (represented as a string)" + * - https://opentelemetry.io/docs/specs/semconv/http/http-metrics/ + */ + public void testRecordDurationWithElasticsearchStatusException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + 
var statusCode = RestStatus.BAD_REQUEST; + var exception = new ElasticsearchStatusException("hello", statusCode); + var expectedError = String.valueOf(statusCode.getStatus()); + + stats.inferenceDuration().record(expectedLong, responseAttributes(model("service", TaskType.ANY, "modelId"), exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), is("service")); + assertThat(attributes.get("task_type"), is(TaskType.ANY.toString())); + assertThat(attributes.get("model_id"), is("modelId")); + assertThat(attributes.get("status_code"), is(statusCode.getStatus())); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + /** + * "If the request fails with an error before response status code was sent or received, + * error.type SHOULD be set to exception type" + * - https://opentelemetry.io/docs/specs/semconv/http/http-metrics/ + */ + public void testRecordDurationWithOtherException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + var exception = new IllegalStateException("ahh"); + var expectedError = exception.getClass().getSimpleName(); + + stats.inferenceDuration().record(expectedLong, responseAttributes(model("service", TaskType.ANY, "modelId"), exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), is("service")); + assertThat(attributes.get("task_type"), is(TaskType.ANY.toString())); + assertThat(attributes.get("model_id"), is("modelId")); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testRecordDurationWithUnparsedModelAndElasticsearchStatusException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), 
histogramCounter); + var statusCode = RestStatus.BAD_REQUEST; + var exception = new ElasticsearchStatusException("hello", statusCode); + var expectedError = String.valueOf(statusCode.getStatus()); + + var unparsedModel = new UnparsedModel("inferenceEntityId", TaskType.ANY, "service", Map.of(), Map.of()); + + stats.inferenceDuration().record(expectedLong, responseAttributes(unparsedModel, exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), is("service")); + assertThat(attributes.get("task_type"), is(TaskType.ANY.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(statusCode.getStatus())); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testRecordDurationWithUnparsedModelAndOtherException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + var exception = new IllegalStateException("ahh"); + var expectedError = exception.getClass().getSimpleName(); + + var unparsedModel = new UnparsedModel("inferenceEntityId", TaskType.ANY, "service", Map.of(), Map.of()); + + stats.inferenceDuration().record(expectedLong, responseAttributes(unparsedModel, exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), is("service")); + assertThat(attributes.get("task_type"), is(TaskType.ANY.toString())); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testRecordDurationWithUnknownModelAndElasticsearchStatusException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + var statusCode = 
RestStatus.BAD_REQUEST; + var exception = new ElasticsearchStatusException("hello", statusCode); + var expectedError = String.valueOf(statusCode.getStatus()); + + stats.inferenceDuration().record(expectedLong, responseAttributes(exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), nullValue()); + assertThat(attributes.get("task_type"), nullValue()); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), is(statusCode.getStatus())); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + public void testRecordDurationWithUnknownModelAndOtherException() { + var expectedLong = randomLong(); + var histogramCounter = mock(LongHistogram.class); + var stats = new InferenceStats(mock(), histogramCounter); + var exception = new IllegalStateException("ahh"); + var expectedError = exception.getClass().getSimpleName(); + + stats.inferenceDuration().record(expectedLong, responseAttributes(exception)); + + verify(histogramCounter).record(eq(expectedLong), assertArg(attributes -> { + assertThat(attributes.get("service"), nullValue()); + assertThat(attributes.get("task_type"), nullValue()); + assertThat(attributes.get("model_id"), nullValue()); + assertThat(attributes.get("status_code"), nullValue()); + assertThat(attributes.get("error.type"), is(expectedError)); + })); + } + + private Model model(String service, TaskType taskType, String modelId) { + var configuration = mock(ModelConfigurations.class); + when(configuration.getService()).thenReturn(service); + var settings = mock(ServiceSettings.class); + if (modelId != null) { + when(settings.modelId()).thenReturn(modelId); + } + + var model = mock(Model.class); + when(model.getTaskType()).thenReturn(taskType); + when(model.getConfigurations()).thenReturn(configuration); + when(model.getServiceSettings()).thenReturn(settings); + + return model; + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimerTests.java new file mode 100644 index 0000000000000..72b29d176f8c1 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/telemetry/InferenceTimerTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.telemetry; + +import org.elasticsearch.test.ESTestCase; + +import java.time.Clock; +import java.time.Instant; +import java.time.temporal.ChronoUnit; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class InferenceTimerTests extends ESTestCase { + + public void testElapsedMillis() { + var expectedDuration = randomLongBetween(10, 300); + + var startTime = Instant.now(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(startTime).thenReturn(startTime.plus(expectedDuration, ChronoUnit.MILLIS)); + var timer = InferenceTimer.start(clock); + + assertThat(expectedDuration, is(timer.elapsedMillis())); + } +} diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index 701bcd204fcfe..fe406722ae1e2 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; 
+import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; @@ -30,6 +31,15 @@ public InferenceRestIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); } + @Override + protected Settings restClientSettings() { + var baseSettings = super.restClientSettings(); + return Settings.builder() + .put(baseSettings) + .put(CLIENT_SOCKET_TIMEOUT, "120s") // Long timeout for model download + .build(); + } + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 198099329c7c0..7e4df5654f225 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,8 +1,10 @@ import org.elasticsearch.gradle.internal.info.BuildParams + import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.publish' esplugin { @@ -17,19 +19,21 @@ base { dependencies { compileOnly project(path: xpackModule('core')) - compileOnly "org.antlr:antlr4-runtime:${versions.antlr4}" + implementation "org.antlr:antlr4-runtime:${versions.antlr4}" testImplementation "org.antlr:antlr4-runtime:${versions.antlr4}" testImplementation project(':test:framework') testImplementation(testArtifact(project(xpackModule('core')))) } -/**************************************************************** - * Enable QA/rest integration tests for snapshot builds only * - * TODO: Enable for all builds upon this feature release * - ****************************************************************/ -if (BuildParams.isSnapshotBuild()) { - addQaCheckDependencies(project) +tasks.named('yamlRestTest') { + 
usesDefaultDistribution() +}.configure { + /**************************************************************** + * Enable QA/rest integration tests for snapshot builds only * + * TODO: Enable for all builds upon this feature release * + ****************************************************************/ + enabled = BuildParams.isSnapshotBuild() } /********************************** diff --git a/x-pack/plugin/kql/licenses/antlr4-runtime-LICENSE.txt b/x-pack/plugin/kql/licenses/antlr4-runtime-LICENSE.txt new file mode 100644 index 0000000000000..95d0a2554f686 --- /dev/null +++ b/x-pack/plugin/kql/licenses/antlr4-runtime-LICENSE.txt @@ -0,0 +1,26 @@ +[The "BSD license"] +Copyright (c) 2015 Terence Parr, Sam Harwell +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/x-pack/plugin/kql/licenses/antlr4-runtime-NOTICE.txt b/x-pack/plugin/kql/licenses/antlr4-runtime-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 index 52a70b9d4c018..da015b699cb15 100644 --- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 +++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 @@ -88,7 +88,7 @@ fieldQueryValue ; fieldName - : value=UNQUOTED_LITERAL+ + : value=UNQUOTED_LITERAL | value=QUOTED_STRING | value=WILDCARD ; diff --git a/x-pack/plugin/kql/src/main/java/module-info.java b/x-pack/plugin/kql/src/main/java/module-info.java new file mode 100644 index 0000000000000..41e51033b9c70 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/module-info.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +module org.elasticsearch.kql { + requires org.elasticsearch.server; + requires org.elasticsearch.xcontent; + requires org.antlr.antlr4.runtime; + requires org.elasticsearch.base; + requires org.apache.lucene.queryparser; + requires org.elasticsearch.logging; + requires org.apache.lucene.core; + + exports org.elasticsearch.xpack.kql; + exports org.elasticsearch.xpack.kql.parser; + exports org.elasticsearch.xpack.kql.query; +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java index 4734924b23618..217513bd2c0da 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/KqlPlugin.java @@ -7,10 +7,21 @@ package org.elasticsearch.xpack.kql; +import org.elasticsearch.Build; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; + +import java.util.List; public class KqlPlugin extends Plugin implements SearchPlugin, ExtensiblePlugin { + @Override + public List> getQueries() { + if (Build.current().isSnapshot()) { + return List.of(new SearchPlugin.QuerySpec<>(KqlQueryBuilder.NAME, KqlQueryBuilder::new, KqlQueryBuilder::fromXContent)); + } + return List.of(); + } } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java index a6de28104e313..5fe3a61c0a761 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlAstBuilder.java @@ -9,6 +9,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; +import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -16,29 +17,34 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.kql.parser.KqlParserExecutionContext.isDateField; -import static org.elasticsearch.xpack.kql.parser.KqlParserExecutionContext.isKeywordField; -import static org.elasticsearch.xpack.kql.parser.KqlParserExecutionContext.isRuntimeField; +import static org.elasticsearch.xpack.kql.parser.KqlParsingContext.isDateField; +import static org.elasticsearch.xpack.kql.parser.KqlParsingContext.isKeywordField; +import static org.elasticsearch.xpack.kql.parser.KqlParsingContext.isRuntimeField; +import static org.elasticsearch.xpack.kql.parser.KqlParsingContext.isSearchableField; import static org.elasticsearch.xpack.kql.parser.ParserUtils.escapeLuceneQueryString; +import static org.elasticsearch.xpack.kql.parser.ParserUtils.extractText; import static org.elasticsearch.xpack.kql.parser.ParserUtils.hasWildcard; +import static org.elasticsearch.xpack.kql.parser.ParserUtils.typedParsing; class KqlAstBuilder extends KqlBaseBaseVisitor { - private final KqlParserExecutionContext kqlParserExecutionContext; + private final KqlParsingContext kqlParsingContext; - KqlAstBuilder(KqlParserExecutionContext kqlParserExecutionContext) { - this.kqlParserExecutionContext = kqlParserExecutionContext; + KqlAstBuilder(KqlParsingContext kqlParsingContext) { + this.kqlParsingContext = kqlParsingContext; } public QueryBuilder 
toQueryBuilder(ParserRuleContext ctx) { if (ctx instanceof KqlBaseParser.TopLevelQueryContext topLeveQueryContext) { if (topLeveQueryContext.query() != null) { - return ParserUtils.typedParsing(this, topLeveQueryContext.query(), QueryBuilder.class); + return typedParsing(this, topLeveQueryContext.query(), QueryBuilder.class); } return new MatchAllQueryBuilder(); @@ -59,9 +65,9 @@ public QueryBuilder visitAndBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) // TODO: KQLContext has an option to wrap the clauses into a filter instead of a must clause. Do we need it? for (ParserRuleContext subQueryCtx : ctx.query()) { if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isAndQuery(booleanSubQueryCtx)) { - ParserUtils.typedParsing(this, subQueryCtx, BoolQueryBuilder.class).must().forEach(builder::must); + typedParsing(this, subQueryCtx, BoolQueryBuilder.class).must().forEach(builder::must); } else { - builder.must(ParserUtils.typedParsing(this, subQueryCtx, QueryBuilder.class)); + builder.must(typedParsing(this, subQueryCtx, QueryBuilder.class)); } } @@ -73,9 +79,9 @@ public QueryBuilder visitOrBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { for (ParserRuleContext subQueryCtx : ctx.query()) { if (subQueryCtx instanceof KqlBaseParser.BooleanQueryContext booleanSubQueryCtx && isOrQuery(booleanSubQueryCtx)) { - ParserUtils.typedParsing(this, subQueryCtx, BoolQueryBuilder.class).should().forEach(builder::should); + typedParsing(this, subQueryCtx, BoolQueryBuilder.class).should().forEach(builder::should); } else { - builder.should(ParserUtils.typedParsing(this, subQueryCtx, QueryBuilder.class)); + builder.should(typedParsing(this, subQueryCtx, QueryBuilder.class)); } } @@ -84,12 +90,12 @@ public QueryBuilder visitOrBooleanQuery(KqlBaseParser.BooleanQueryContext ctx) { @Override public QueryBuilder visitNotQuery(KqlBaseParser.NotQueryContext ctx) { - return QueryBuilders.boolQuery().mustNot(ParserUtils.typedParsing(this, 
ctx.simpleQuery(), QueryBuilder.class)); + return QueryBuilders.boolQuery().mustNot(typedParsing(this, ctx.simpleQuery(), QueryBuilder.class)); } @Override public QueryBuilder visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { - return ParserUtils.typedParsing(this, ctx.query(), QueryBuilder.class); + return typedParsing(this, ctx.query(), QueryBuilder.class); } @Override @@ -121,12 +127,16 @@ public QueryBuilder visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { public QueryBuilder visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); - String queryText = ParserUtils.extractText(ctx.rangeQueryValue()); + String queryText = extractText(ctx.rangeQueryValue()); BiFunction rangeOperation = rangeOperation(ctx.operator); withFields(ctx.fieldName(), (fieldName, mappedFieldType) -> { RangeQueryBuilder rangeQuery = rangeOperation.apply(QueryBuilders.rangeQuery(fieldName), queryText); - // TODO: add timezone for date fields + + if (kqlParsingContext.timeZone() != null) { + rangeQuery.timeZone(kqlParsingContext.timeZone().getId()); + } + boolQueryBuilder.should(rangeQuery); }); @@ -135,42 +145,54 @@ public QueryBuilder visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { @Override public QueryBuilder visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { - String queryText = ParserUtils.extractText(ctx.fieldQueryValue()); + String queryText = extractText(ctx.fieldQueryValue()); if (hasWildcard(ctx.fieldQueryValue())) { - // TODO: set default fields. 
- return QueryBuilders.queryStringQuery(escapeLuceneQueryString(queryText, true)); + QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(escapeLuceneQueryString(queryText, true)); + if (kqlParsingContext.defaultField() != null) { + queryString.defaultField(kqlParsingContext.defaultField()); + } + return queryString; } boolean isPhraseMatch = ctx.fieldQueryValue().QUOTED_STRING() != null; - return QueryBuilders.multiMatchQuery(queryText) - // TODO: add default fields? + MultiMatchQueryBuilder multiMatchQuery = QueryBuilders.multiMatchQuery(queryText) .type(isPhraseMatch ? MultiMatchQueryBuilder.Type.PHRASE : MultiMatchQueryBuilder.Type.BEST_FIELDS) .lenient(true); + + if (kqlParsingContext.defaultField() != null) { + kqlParsingContext.resolveDefaultFieldNames() + .stream() + .filter(kqlParsingContext::isSearchableField) + .forEach(multiMatchQuery::field); + } + + return multiMatchQuery; } @Override public QueryBuilder visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); - String queryText = ParserUtils.extractText(ctx.fieldQueryValue()); + String queryText = extractText(ctx.fieldQueryValue()); boolean hasWildcard = hasWildcard(ctx.fieldQueryValue()); withFields(ctx.fieldName(), (fieldName, mappedFieldType) -> { QueryBuilder fieldQuery = null; if (hasWildcard && isKeywordField(mappedFieldType)) { - fieldQuery = QueryBuilders.wildcardQuery(fieldName, queryText) - .caseInsensitive(kqlParserExecutionContext.isCaseSensitive() == false); + fieldQuery = QueryBuilders.wildcardQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); } else if (hasWildcard) { fieldQuery = QueryBuilders.queryStringQuery(escapeLuceneQueryString(queryText, true)).field(fieldName); } else if (isDateField(mappedFieldType)) { - // TODO: add timezone - fieldQuery = QueryBuilders.rangeQuery(fieldName).gte(queryText).lte(queryText); + RangeQueryBuilder rangeFieldQuery = 
QueryBuilders.rangeQuery(fieldName).gte(queryText).lte(queryText); + if (kqlParsingContext.timeZone() != null) { + rangeFieldQuery.timeZone(kqlParsingContext.timeZone().getId()); + } + fieldQuery = rangeFieldQuery; } else if (isKeywordField(mappedFieldType)) { - fieldQuery = QueryBuilders.termQuery(fieldName, queryText) - .caseInsensitive(kqlParserExecutionContext.isCaseSensitive() == false); + fieldQuery = QueryBuilders.termQuery(fieldName, queryText).caseInsensitive(kqlParsingContext.caseInsensitive()); } else if (ctx.fieldQueryValue().QUOTED_STRING() != null) { fieldQuery = QueryBuilders.matchPhraseQuery(fieldName, queryText); } else { @@ -194,7 +216,26 @@ private static boolean isOrQuery(KqlBaseParser.BooleanQueryContext ctx) { } private void withFields(KqlBaseParser.FieldNameContext ctx, BiConsumer fieldConsummer) { - kqlParserExecutionContext.resolveFields(ctx).forEach(fieldDef -> fieldConsummer.accept(fieldDef.v1(), fieldDef.v2())); + assert ctx != null : "Field ctx cannot be null"; + String fieldNamePattern = extractText(ctx); + Set fieldNames = kqlParsingContext.resolveFieldNames(fieldNamePattern); + + if (ctx.value.getType() == KqlBaseParser.QUOTED_STRING && Regex.isSimpleMatchPattern(fieldNamePattern)) { + // When using quoted string, wildcards are not expanded. + // No field can match and we can return early. 
+ return; + } + + if (ctx.value.getType() == KqlBaseParser.QUOTED_STRING) { + assert fieldNames.size() < 2 : "expecting only one matching field"; + } + + fieldNames.forEach(fieldName -> { + MappedFieldType fieldType = kqlParsingContext.fieldType(fieldName); + if (isSearchableField(fieldName, fieldType)) { + fieldConsummer.accept(fieldName, fieldType); + } + }); } private QueryBuilder rewriteDisjunctionQuery(BoolQueryBuilder boolQueryBuilder) { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp index 2b09dd52e95b0..7af37d7e3c3b5 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp @@ -54,4 +54,4 @@ fieldName atn: -[4, 1, 16, 140, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 1, 0, 3, 0, 30, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 40, 8, 1, 10, 1, 12, 1, 43, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 3, 5, 66, 8, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 4, 8, 79, 8, 8, 11, 8, 12, 8, 80, 1, 8, 3, 8, 84, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 100, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 107, 8, 11, 1, 12, 3, 12, 110, 8, 12, 1, 12, 4, 12, 113, 8, 12, 11, 12, 12, 12, 114, 1, 12, 3, 12, 118, 8, 12, 1, 12, 1, 12, 3, 12, 122, 8, 12, 1, 12, 1, 12, 3, 12, 126, 8, 12, 1, 12, 3, 12, 129, 8, 12, 1, 13, 4, 13, 132, 8, 13, 11, 13, 12, 13, 133, 1, 13, 1, 13, 3, 13, 138, 8, 13, 1, 13, 0, 1, 2, 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 
14, 16, 16, 1, 0, 2, 4, 150, 0, 29, 1, 0, 0, 0, 2, 33, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 65, 1, 0, 0, 0, 12, 69, 1, 0, 0, 0, 14, 73, 1, 0, 0, 0, 16, 83, 1, 0, 0, 0, 18, 85, 1, 0, 0, 0, 20, 99, 1, 0, 0, 0, 22, 106, 1, 0, 0, 0, 24, 128, 1, 0, 0, 0, 26, 137, 1, 0, 0, 0, 28, 30, 3, 2, 1, 0, 29, 28, 1, 0, 0, 0, 29, 30, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 32, 5, 0, 0, 1, 32, 1, 1, 0, 0, 0, 33, 34, 6, 1, -1, 0, 34, 35, 3, 4, 2, 0, 35, 41, 1, 0, 0, 0, 36, 37, 10, 2, 0, 0, 37, 38, 7, 0, 0, 0, 38, 40, 3, 2, 1, 2, 39, 36, 1, 0, 0, 0, 40, 43, 1, 0, 0, 0, 41, 39, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 53, 3, 6, 3, 0, 45, 53, 3, 8, 4, 0, 46, 53, 3, 12, 6, 0, 47, 53, 3, 10, 5, 0, 48, 53, 3, 18, 9, 0, 49, 53, 3, 14, 7, 0, 50, 53, 3, 20, 10, 0, 51, 53, 3, 22, 11, 0, 52, 44, 1, 0, 0, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 5, 4, 0, 0, 55, 56, 3, 4, 2, 0, 56, 7, 1, 0, 0, 0, 57, 58, 3, 26, 13, 0, 58, 59, 5, 5, 0, 0, 59, 60, 5, 12, 0, 0, 60, 61, 3, 2, 1, 0, 61, 62, 5, 13, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 16, 0, 0, 64, 66, 5, 5, 0, 0, 65, 63, 1, 0, 0, 0, 65, 66, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0, 67, 68, 5, 16, 0, 0, 68, 11, 1, 0, 0, 0, 69, 70, 5, 10, 0, 0, 70, 71, 3, 2, 1, 0, 71, 72, 5, 11, 0, 0, 72, 13, 1, 0, 0, 0, 73, 74, 3, 26, 13, 0, 74, 75, 7, 1, 0, 0, 75, 76, 3, 16, 8, 0, 76, 15, 1, 0, 0, 0, 77, 79, 7, 2, 0, 0, 78, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 80, 81, 1, 0, 0, 0, 81, 84, 1, 0, 0, 0, 82, 84, 5, 15, 0, 0, 83, 78, 1, 0, 0, 0, 83, 82, 1, 0, 0, 0, 84, 17, 1, 0, 0, 0, 85, 86, 3, 26, 13, 0, 86, 87, 5, 5, 0, 0, 87, 88, 5, 16, 0, 0, 88, 19, 1, 0, 0, 0, 89, 90, 3, 26, 13, 0, 90, 91, 5, 5, 0, 0, 91, 92, 3, 24, 12, 0, 92, 100, 1, 0, 0, 0, 93, 94, 3, 26, 13, 0, 94, 95, 5, 5, 0, 0, 95, 96, 5, 10, 0, 0, 96, 97, 3, 24, 12, 0, 97, 98, 5, 11, 0, 0, 98, 100, 1, 0, 0, 0, 
99, 89, 1, 0, 0, 0, 99, 93, 1, 0, 0, 0, 100, 21, 1, 0, 0, 0, 101, 107, 3, 24, 12, 0, 102, 103, 5, 10, 0, 0, 103, 104, 3, 24, 12, 0, 104, 105, 5, 11, 0, 0, 105, 107, 1, 0, 0, 0, 106, 101, 1, 0, 0, 0, 106, 102, 1, 0, 0, 0, 107, 23, 1, 0, 0, 0, 108, 110, 7, 3, 0, 0, 109, 108, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 113, 7, 2, 0, 0, 112, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 117, 1, 0, 0, 0, 116, 118, 7, 3, 0, 0, 117, 116, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 129, 1, 0, 0, 0, 119, 121, 7, 0, 0, 0, 120, 122, 7, 3, 0, 0, 121, 120, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 129, 1, 0, 0, 0, 123, 125, 5, 4, 0, 0, 124, 126, 7, 0, 0, 0, 125, 124, 1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 129, 5, 15, 0, 0, 128, 109, 1, 0, 0, 0, 128, 119, 1, 0, 0, 0, 128, 123, 1, 0, 0, 0, 128, 127, 1, 0, 0, 0, 129, 25, 1, 0, 0, 0, 130, 132, 5, 14, 0, 0, 131, 130, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0, 134, 138, 1, 0, 0, 0, 135, 138, 5, 15, 0, 0, 136, 138, 5, 16, 0, 0, 137, 131, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 137, 136, 1, 0, 0, 0, 138, 27, 1, 0, 0, 0, 16, 29, 41, 52, 65, 80, 83, 99, 106, 109, 114, 117, 121, 125, 128, 133, 137] \ No newline at end of file +[4, 1, 16, 136, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 1, 0, 3, 0, 30, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 40, 8, 1, 10, 1, 12, 1, 43, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 3, 5, 66, 8, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 4, 8, 79, 8, 8, 11, 8, 12, 8, 80, 1, 8, 3, 8, 84, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 100, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 
107, 8, 11, 1, 12, 3, 12, 110, 8, 12, 1, 12, 4, 12, 113, 8, 12, 11, 12, 12, 12, 114, 1, 12, 3, 12, 118, 8, 12, 1, 12, 1, 12, 3, 12, 122, 8, 12, 1, 12, 1, 12, 3, 12, 126, 8, 12, 1, 12, 3, 12, 129, 8, 12, 1, 13, 1, 13, 1, 13, 3, 13, 134, 8, 13, 1, 13, 0, 1, 2, 14, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 145, 0, 29, 1, 0, 0, 0, 2, 33, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 65, 1, 0, 0, 0, 12, 69, 1, 0, 0, 0, 14, 73, 1, 0, 0, 0, 16, 83, 1, 0, 0, 0, 18, 85, 1, 0, 0, 0, 20, 99, 1, 0, 0, 0, 22, 106, 1, 0, 0, 0, 24, 128, 1, 0, 0, 0, 26, 133, 1, 0, 0, 0, 28, 30, 3, 2, 1, 0, 29, 28, 1, 0, 0, 0, 29, 30, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 32, 5, 0, 0, 1, 32, 1, 1, 0, 0, 0, 33, 34, 6, 1, -1, 0, 34, 35, 3, 4, 2, 0, 35, 41, 1, 0, 0, 0, 36, 37, 10, 2, 0, 0, 37, 38, 7, 0, 0, 0, 38, 40, 3, 2, 1, 2, 39, 36, 1, 0, 0, 0, 40, 43, 1, 0, 0, 0, 41, 39, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 53, 3, 6, 3, 0, 45, 53, 3, 8, 4, 0, 46, 53, 3, 12, 6, 0, 47, 53, 3, 10, 5, 0, 48, 53, 3, 18, 9, 0, 49, 53, 3, 14, 7, 0, 50, 53, 3, 20, 10, 0, 51, 53, 3, 22, 11, 0, 52, 44, 1, 0, 0, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 5, 4, 0, 0, 55, 56, 3, 4, 2, 0, 56, 7, 1, 0, 0, 0, 57, 58, 3, 26, 13, 0, 58, 59, 5, 5, 0, 0, 59, 60, 5, 12, 0, 0, 60, 61, 3, 2, 1, 0, 61, 62, 5, 13, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 16, 0, 0, 64, 66, 5, 5, 0, 0, 65, 63, 1, 0, 0, 0, 65, 66, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0, 67, 68, 5, 16, 0, 0, 68, 11, 1, 0, 0, 0, 69, 70, 5, 10, 0, 0, 70, 71, 3, 2, 1, 0, 71, 72, 5, 11, 0, 0, 72, 13, 1, 0, 0, 0, 73, 74, 3, 26, 13, 0, 74, 75, 7, 1, 0, 0, 75, 76, 3, 16, 8, 0, 76, 15, 1, 0, 0, 0, 77, 79, 7, 2, 0, 0, 78, 77, 1, 0, 0, 0, 79, 80, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 80, 81, 1, 0, 0, 0, 81, 84, 1, 0, 0, 0, 82, 84, 5, 15, 0, 0, 
83, 78, 1, 0, 0, 0, 83, 82, 1, 0, 0, 0, 84, 17, 1, 0, 0, 0, 85, 86, 3, 26, 13, 0, 86, 87, 5, 5, 0, 0, 87, 88, 5, 16, 0, 0, 88, 19, 1, 0, 0, 0, 89, 90, 3, 26, 13, 0, 90, 91, 5, 5, 0, 0, 91, 92, 3, 24, 12, 0, 92, 100, 1, 0, 0, 0, 93, 94, 3, 26, 13, 0, 94, 95, 5, 5, 0, 0, 95, 96, 5, 10, 0, 0, 96, 97, 3, 24, 12, 0, 97, 98, 5, 11, 0, 0, 98, 100, 1, 0, 0, 0, 99, 89, 1, 0, 0, 0, 99, 93, 1, 0, 0, 0, 100, 21, 1, 0, 0, 0, 101, 107, 3, 24, 12, 0, 102, 103, 5, 10, 0, 0, 103, 104, 3, 24, 12, 0, 104, 105, 5, 11, 0, 0, 105, 107, 1, 0, 0, 0, 106, 101, 1, 0, 0, 0, 106, 102, 1, 0, 0, 0, 107, 23, 1, 0, 0, 0, 108, 110, 7, 3, 0, 0, 109, 108, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 113, 7, 2, 0, 0, 112, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 117, 1, 0, 0, 0, 116, 118, 7, 3, 0, 0, 117, 116, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 129, 1, 0, 0, 0, 119, 121, 7, 0, 0, 0, 120, 122, 7, 3, 0, 0, 121, 120, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 129, 1, 0, 0, 0, 123, 125, 5, 4, 0, 0, 124, 126, 7, 0, 0, 0, 125, 124, 1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 129, 5, 15, 0, 0, 128, 109, 1, 0, 0, 0, 128, 119, 1, 0, 0, 0, 128, 123, 1, 0, 0, 0, 128, 127, 1, 0, 0, 0, 129, 25, 1, 0, 0, 0, 130, 134, 5, 14, 0, 0, 131, 134, 5, 15, 0, 0, 132, 134, 5, 16, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 27, 1, 0, 0, 0, 15, 29, 41, 52, 65, 80, 83, 99, 106, 109, 114, 117, 121, 125, 128, 133] \ No newline at end of file diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java index b4b0a69a82387..118ac32aadd61 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java @@ -1194,10 +1194,7 @@ public final FieldQueryValueContext 
fieldQueryValue() throws RecognitionExceptio @SuppressWarnings("CheckReturnValue") public static class FieldNameContext extends ParserRuleContext { public Token value; - public List UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); } - public TerminalNode UNQUOTED_LITERAL(int i) { - return getToken(KqlBaseParser.UNQUOTED_LITERAL, i); - } + public TerminalNode UNQUOTED_LITERAL() { return getToken(KqlBaseParser.UNQUOTED_LITERAL, 0); } public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); } public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); } public FieldNameContext(ParserRuleContext parent, int invokingState) { @@ -1222,41 +1219,28 @@ public T accept(ParseTreeVisitor visitor) { public final FieldNameContext fieldName() throws RecognitionException { FieldNameContext _localctx = new FieldNameContext(_ctx, getState()); enterRule(_localctx, 26, RULE_fieldName); - int _la; try { - setState(137); + setState(133); _errHandler.sync(this); switch (_input.LA(1)) { case UNQUOTED_LITERAL: enterOuterAlt(_localctx, 1); { - setState(131); - _errHandler.sync(this); - _la = _input.LA(1); - do { - { - { - setState(130); - ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); - } - } - setState(133); - _errHandler.sync(this); - _la = _input.LA(1); - } while ( _la==UNQUOTED_LITERAL ); + setState(130); + ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL); } break; case QUOTED_STRING: enterOuterAlt(_localctx, 2); { - setState(135); + setState(131); ((FieldNameContext)_localctx).value = match(QUOTED_STRING); } break; case WILDCARD: enterOuterAlt(_localctx, 3); { - setState(136); + setState(132); ((FieldNameContext)_localctx).value = match(WILDCARD); } break; @@ -1291,7 +1275,7 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) { } public static final String _serializedATN = - "\u0004\u0001\u0010\u008c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + 
"\u0004\u0001\u0010\u0088\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -1309,70 +1293,67 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) { "\nd\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0003"+ "\u000bk\b\u000b\u0001\f\u0003\fn\b\f\u0001\f\u0004\fq\b\f\u000b\f\f\f"+ "r\u0001\f\u0003\fv\b\f\u0001\f\u0001\f\u0003\fz\b\f\u0001\f\u0001\f\u0003"+ - "\f~\b\f\u0001\f\u0003\f\u0081\b\f\u0001\r\u0004\r\u0084\b\r\u000b\r\f"+ - "\r\u0085\u0001\r\u0001\r\u0003\r\u008a\b\r\u0001\r\u0000\u0001\u0002\u000e"+ - "\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006\t\u0002\u0000\u000e"+ - "\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u0096\u0000\u001d\u0001\u0000"+ - "\u0000\u0000\u0002!\u0001\u0000\u0000\u0000\u00044\u0001\u0000\u0000\u0000"+ - "\u00066\u0001\u0000\u0000\u0000\b9\u0001\u0000\u0000\u0000\nA\u0001\u0000"+ - "\u0000\u0000\fE\u0001\u0000\u0000\u0000\u000eI\u0001\u0000\u0000\u0000"+ - "\u0010S\u0001\u0000\u0000\u0000\u0012U\u0001\u0000\u0000\u0000\u0014c"+ - "\u0001\u0000\u0000\u0000\u0016j\u0001\u0000\u0000\u0000\u0018\u0080\u0001"+ - "\u0000\u0000\u0000\u001a\u0089\u0001\u0000\u0000\u0000\u001c\u001e\u0003"+ - "\u0002\u0001\u0000\u001d\u001c\u0001\u0000\u0000\u0000\u001d\u001e\u0001"+ - "\u0000\u0000\u0000\u001e\u001f\u0001\u0000\u0000\u0000\u001f \u0005\u0000"+ - "\u0000\u0001 \u0001\u0001\u0000\u0000\u0000!\"\u0006\u0001\uffff\uffff"+ - "\u0000\"#\u0003\u0004\u0002\u0000#)\u0001\u0000\u0000\u0000$%\n\u0002"+ - "\u0000\u0000%&\u0007\u0000\u0000\u0000&(\u0003\u0002\u0001\u0002\'$\u0001"+ - "\u0000\u0000\u0000(+\u0001\u0000\u0000\u0000)\'\u0001\u0000\u0000\u0000"+ - 
")*\u0001\u0000\u0000\u0000*\u0003\u0001\u0000\u0000\u0000+)\u0001\u0000"+ - "\u0000\u0000,5\u0003\u0006\u0003\u0000-5\u0003\b\u0004\u0000.5\u0003\f"+ - "\u0006\u0000/5\u0003\n\u0005\u000005\u0003\u0012\t\u000015\u0003\u000e"+ - "\u0007\u000025\u0003\u0014\n\u000035\u0003\u0016\u000b\u00004,\u0001\u0000"+ - "\u0000\u00004-\u0001\u0000\u0000\u00004.\u0001\u0000\u0000\u00004/\u0001"+ - "\u0000\u0000\u000040\u0001\u0000\u0000\u000041\u0001\u0000\u0000\u0000"+ - "42\u0001\u0000\u0000\u000043\u0001\u0000\u0000\u00005\u0005\u0001\u0000"+ - "\u0000\u000067\u0005\u0004\u0000\u000078\u0003\u0004\u0002\u00008\u0007"+ - "\u0001\u0000\u0000\u00009:\u0003\u001a\r\u0000:;\u0005\u0005\u0000\u0000"+ - ";<\u0005\f\u0000\u0000<=\u0003\u0002\u0001\u0000=>\u0005\r\u0000\u0000"+ - ">\t\u0001\u0000\u0000\u0000?@\u0005\u0010\u0000\u0000@B\u0005\u0005\u0000"+ - "\u0000A?\u0001\u0000\u0000\u0000AB\u0001\u0000\u0000\u0000BC\u0001\u0000"+ - "\u0000\u0000CD\u0005\u0010\u0000\u0000D\u000b\u0001\u0000\u0000\u0000"+ - "EF\u0005\n\u0000\u0000FG\u0003\u0002\u0001\u0000GH\u0005\u000b\u0000\u0000"+ - "H\r\u0001\u0000\u0000\u0000IJ\u0003\u001a\r\u0000JK\u0007\u0001\u0000"+ - "\u0000KL\u0003\u0010\b\u0000L\u000f\u0001\u0000\u0000\u0000MO\u0007\u0002"+ - "\u0000\u0000NM\u0001\u0000\u0000\u0000OP\u0001\u0000\u0000\u0000PN\u0001"+ - "\u0000\u0000\u0000PQ\u0001\u0000\u0000\u0000QT\u0001\u0000\u0000\u0000"+ - "RT\u0005\u000f\u0000\u0000SN\u0001\u0000\u0000\u0000SR\u0001\u0000\u0000"+ - "\u0000T\u0011\u0001\u0000\u0000\u0000UV\u0003\u001a\r\u0000VW\u0005\u0005"+ - "\u0000\u0000WX\u0005\u0010\u0000\u0000X\u0013\u0001\u0000\u0000\u0000"+ - "YZ\u0003\u001a\r\u0000Z[\u0005\u0005\u0000\u0000[\\\u0003\u0018\f\u0000"+ - "\\d\u0001\u0000\u0000\u0000]^\u0003\u001a\r\u0000^_\u0005\u0005\u0000"+ - "\u0000_`\u0005\n\u0000\u0000`a\u0003\u0018\f\u0000ab\u0005\u000b\u0000"+ - "\u0000bd\u0001\u0000\u0000\u0000cY\u0001\u0000\u0000\u0000c]\u0001\u0000"+ - 
"\u0000\u0000d\u0015\u0001\u0000\u0000\u0000ek\u0003\u0018\f\u0000fg\u0005"+ - "\n\u0000\u0000gh\u0003\u0018\f\u0000hi\u0005\u000b\u0000\u0000ik\u0001"+ - "\u0000\u0000\u0000je\u0001\u0000\u0000\u0000jf\u0001\u0000\u0000\u0000"+ - "k\u0017\u0001\u0000\u0000\u0000ln\u0007\u0003\u0000\u0000ml\u0001\u0000"+ - "\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000\u0000\u0000oq\u0007"+ - "\u0002\u0000\u0000po\u0001\u0000\u0000\u0000qr\u0001\u0000\u0000\u0000"+ - "rp\u0001\u0000\u0000\u0000rs\u0001\u0000\u0000\u0000su\u0001\u0000\u0000"+ - "\u0000tv\u0007\u0003\u0000\u0000ut\u0001\u0000\u0000\u0000uv\u0001\u0000"+ - "\u0000\u0000v\u0081\u0001\u0000\u0000\u0000wy\u0007\u0000\u0000\u0000"+ - "xz\u0007\u0003\u0000\u0000yx\u0001\u0000\u0000\u0000yz\u0001\u0000\u0000"+ - "\u0000z\u0081\u0001\u0000\u0000\u0000{}\u0005\u0004\u0000\u0000|~\u0007"+ - "\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000}~\u0001\u0000\u0000\u0000"+ - "~\u0081\u0001\u0000\u0000\u0000\u007f\u0081\u0005\u000f\u0000\u0000\u0080"+ - "m\u0001\u0000\u0000\u0000\u0080w\u0001\u0000\u0000\u0000\u0080{\u0001"+ - "\u0000\u0000\u0000\u0080\u007f\u0001\u0000\u0000\u0000\u0081\u0019\u0001"+ - "\u0000\u0000\u0000\u0082\u0084\u0005\u000e\u0000\u0000\u0083\u0082\u0001"+ - "\u0000\u0000\u0000\u0084\u0085\u0001\u0000\u0000\u0000\u0085\u0083\u0001"+ - "\u0000\u0000\u0000\u0085\u0086\u0001\u0000\u0000\u0000\u0086\u008a\u0001"+ - "\u0000\u0000\u0000\u0087\u008a\u0005\u000f\u0000\u0000\u0088\u008a\u0005"+ - "\u0010\u0000\u0000\u0089\u0083\u0001\u0000\u0000\u0000\u0089\u0087\u0001"+ - "\u0000\u0000\u0000\u0089\u0088\u0001\u0000\u0000\u0000\u008a\u001b\u0001"+ - "\u0000\u0000\u0000\u0010\u001d)4APScjmruy}\u0080\u0085\u0089"; + "\f~\b\f\u0001\f\u0003\f\u0081\b\f\u0001\r\u0001\r\u0001\r\u0003\r\u0086"+ + "\b\r\u0001\r\u0000\u0001\u0002\u000e\u0000\u0002\u0004\u0006\b\n\f\u000e"+ + "\u0010\u0012\u0014\u0016\u0018\u001a\u0000\u0004\u0001\u0000\u0002\u0003"+ + 
"\u0001\u0000\u0006\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002"+ + "\u0004\u0091\u0000\u001d\u0001\u0000\u0000\u0000\u0002!\u0001\u0000\u0000"+ + "\u0000\u00044\u0001\u0000\u0000\u0000\u00066\u0001\u0000\u0000\u0000\b"+ + "9\u0001\u0000\u0000\u0000\nA\u0001\u0000\u0000\u0000\fE\u0001\u0000\u0000"+ + "\u0000\u000eI\u0001\u0000\u0000\u0000\u0010S\u0001\u0000\u0000\u0000\u0012"+ + "U\u0001\u0000\u0000\u0000\u0014c\u0001\u0000\u0000\u0000\u0016j\u0001"+ + "\u0000\u0000\u0000\u0018\u0080\u0001\u0000\u0000\u0000\u001a\u0085\u0001"+ + "\u0000\u0000\u0000\u001c\u001e\u0003\u0002\u0001\u0000\u001d\u001c\u0001"+ + "\u0000\u0000\u0000\u001d\u001e\u0001\u0000\u0000\u0000\u001e\u001f\u0001"+ + "\u0000\u0000\u0000\u001f \u0005\u0000\u0000\u0001 \u0001\u0001\u0000\u0000"+ + "\u0000!\"\u0006\u0001\uffff\uffff\u0000\"#\u0003\u0004\u0002\u0000#)\u0001"+ + "\u0000\u0000\u0000$%\n\u0002\u0000\u0000%&\u0007\u0000\u0000\u0000&(\u0003"+ + "\u0002\u0001\u0002\'$\u0001\u0000\u0000\u0000(+\u0001\u0000\u0000\u0000"+ + ")\'\u0001\u0000\u0000\u0000)*\u0001\u0000\u0000\u0000*\u0003\u0001\u0000"+ + "\u0000\u0000+)\u0001\u0000\u0000\u0000,5\u0003\u0006\u0003\u0000-5\u0003"+ + "\b\u0004\u0000.5\u0003\f\u0006\u0000/5\u0003\n\u0005\u000005\u0003\u0012"+ + "\t\u000015\u0003\u000e\u0007\u000025\u0003\u0014\n\u000035\u0003\u0016"+ + "\u000b\u00004,\u0001\u0000\u0000\u00004-\u0001\u0000\u0000\u00004.\u0001"+ + "\u0000\u0000\u00004/\u0001\u0000\u0000\u000040\u0001\u0000\u0000\u0000"+ + "41\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000\u0000"+ + "\u00005\u0005\u0001\u0000\u0000\u000067\u0005\u0004\u0000\u000078\u0003"+ + "\u0004\u0002\u00008\u0007\u0001\u0000\u0000\u00009:\u0003\u001a\r\u0000"+ + ":;\u0005\u0005\u0000\u0000;<\u0005\f\u0000\u0000<=\u0003\u0002\u0001\u0000"+ + "=>\u0005\r\u0000\u0000>\t\u0001\u0000\u0000\u0000?@\u0005\u0010\u0000"+ + "\u0000@B\u0005\u0005\u0000\u0000A?\u0001\u0000\u0000\u0000AB\u0001\u0000"+ + 
"\u0000\u0000BC\u0001\u0000\u0000\u0000CD\u0005\u0010\u0000\u0000D\u000b"+ + "\u0001\u0000\u0000\u0000EF\u0005\n\u0000\u0000FG\u0003\u0002\u0001\u0000"+ + "GH\u0005\u000b\u0000\u0000H\r\u0001\u0000\u0000\u0000IJ\u0003\u001a\r"+ + "\u0000JK\u0007\u0001\u0000\u0000KL\u0003\u0010\b\u0000L\u000f\u0001\u0000"+ + "\u0000\u0000MO\u0007\u0002\u0000\u0000NM\u0001\u0000\u0000\u0000OP\u0001"+ + "\u0000\u0000\u0000PN\u0001\u0000\u0000\u0000PQ\u0001\u0000\u0000\u0000"+ + "QT\u0001\u0000\u0000\u0000RT\u0005\u000f\u0000\u0000SN\u0001\u0000\u0000"+ + "\u0000SR\u0001\u0000\u0000\u0000T\u0011\u0001\u0000\u0000\u0000UV\u0003"+ + "\u001a\r\u0000VW\u0005\u0005\u0000\u0000WX\u0005\u0010\u0000\u0000X\u0013"+ + "\u0001\u0000\u0000\u0000YZ\u0003\u001a\r\u0000Z[\u0005\u0005\u0000\u0000"+ + "[\\\u0003\u0018\f\u0000\\d\u0001\u0000\u0000\u0000]^\u0003\u001a\r\u0000"+ + "^_\u0005\u0005\u0000\u0000_`\u0005\n\u0000\u0000`a\u0003\u0018\f\u0000"+ + "ab\u0005\u000b\u0000\u0000bd\u0001\u0000\u0000\u0000cY\u0001\u0000\u0000"+ + "\u0000c]\u0001\u0000\u0000\u0000d\u0015\u0001\u0000\u0000\u0000ek\u0003"+ + "\u0018\f\u0000fg\u0005\n\u0000\u0000gh\u0003\u0018\f\u0000hi\u0005\u000b"+ + "\u0000\u0000ik\u0001\u0000\u0000\u0000je\u0001\u0000\u0000\u0000jf\u0001"+ + "\u0000\u0000\u0000k\u0017\u0001\u0000\u0000\u0000ln\u0007\u0003\u0000"+ + "\u0000ml\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000"+ + "\u0000\u0000oq\u0007\u0002\u0000\u0000po\u0001\u0000\u0000\u0000qr\u0001"+ + "\u0000\u0000\u0000rp\u0001\u0000\u0000\u0000rs\u0001\u0000\u0000\u0000"+ + "su\u0001\u0000\u0000\u0000tv\u0007\u0003\u0000\u0000ut\u0001\u0000\u0000"+ + "\u0000uv\u0001\u0000\u0000\u0000v\u0081\u0001\u0000\u0000\u0000wy\u0007"+ + "\u0000\u0000\u0000xz\u0007\u0003\u0000\u0000yx\u0001\u0000\u0000\u0000"+ + "yz\u0001\u0000\u0000\u0000z\u0081\u0001\u0000\u0000\u0000{}\u0005\u0004"+ + "\u0000\u0000|~\u0007\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000}~\u0001"+ + 
"\u0000\u0000\u0000~\u0081\u0001\u0000\u0000\u0000\u007f\u0081\u0005\u000f"+ + "\u0000\u0000\u0080m\u0001\u0000\u0000\u0000\u0080w\u0001\u0000\u0000\u0000"+ + "\u0080{\u0001\u0000\u0000\u0000\u0080\u007f\u0001\u0000\u0000\u0000\u0081"+ + "\u0019\u0001\u0000\u0000\u0000\u0082\u0086\u0005\u000e\u0000\u0000\u0083"+ + "\u0086\u0005\u000f\u0000\u0000\u0084\u0086\u0005\u0010\u0000\u0000\u0085"+ + "\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000\u0085"+ + "\u0084\u0001\u0000\u0000\u0000\u0086\u001b\u0001\u0000\u0000\u0000\u000f"+ + "\u001d)4APScjmruy}\u0080\u0085"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java index 1064f901cacb8..6c2d30860221a 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParser.java @@ -14,8 +14,8 @@ import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer; import org.antlr.v4.runtime.atn.PredictionMode; +import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -25,22 +25,14 @@ public class KqlParser { private static final Logger log = LogManager.getLogger(KqlParser.class); - public QueryBuilder parseKqlQuery(String kqlQuery, SearchExecutionContext searchExecutionContext) { - if (log.isDebugEnabled()) { - log.debug("Parsing KQL query: {}", kqlQuery); - } - - return invokeParser( - kqlQuery, - new KqlParserExecutionContext(searchExecutionContext), - KqlBaseParser::topLevelQuery, - KqlAstBuilder::toQueryBuilder - ); + public QueryBuilder parseKqlQuery(String kqlQuery, 
KqlParsingContext kqlParserContext) { + log.trace("Parsing KQL query: {}", kqlQuery); + return invokeParser(kqlQuery, kqlParserContext, KqlBaseParser::topLevelQuery, KqlAstBuilder::toQueryBuilder); } private T invokeParser( String kqlQuery, - KqlParserExecutionContext kqlParserExecutionContext, + KqlParsingContext kqlParsingContext, Function parseFunction, BiFunction visitor ) { @@ -59,11 +51,9 @@ private T invokeParser( ParserRuleContext tree = parseFunction.apply(parser); - if (log.isTraceEnabled()) { - log.trace("Parse tree: {}", tree.toStringTree()); - } + log.trace(() -> Strings.format("Parse tree: %s", tree.toStringTree())); - return visitor.apply(new KqlAstBuilder(kqlParserExecutionContext), tree); + return visitor.apply(new KqlAstBuilder(kqlParsingContext), tree); } private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() { diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParserExecutionContext.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParserExecutionContext.java deleted file mode 100644 index d05c70c6b933f..0000000000000 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParserExecutionContext.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.kql.parser; - -import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.AbstractScriptFieldType; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.SearchExecutionContext; - -import java.time.ZoneId; -import java.util.List; -import java.util.function.Predicate; -import java.util.stream.Collectors; - -import static org.elasticsearch.core.Tuple.tuple; - -class KqlParserExecutionContext extends SearchExecutionContext { - - private static final List IGNORED_METADATA_FIELDS = List.of( - "_seq_no", - "_index_mode", - "_routing", - "_ignored", - "_nested_path", - "_field_names" - ); - - private static Predicate> searchableFieldFilter = (fieldDef) -> fieldDef.v2().isSearchable(); - - private static Predicate> ignoredFieldFilter = (fieldDef) -> IGNORED_METADATA_FIELDS.contains( - fieldDef.v1() - ); - - KqlParserExecutionContext(SearchExecutionContext source) { - super(source); - } - - public Iterable> resolveFields(KqlBaseParser.FieldNameContext fieldNameContext) { - // TODO: use index settings default field. - String fieldNamePattern = fieldNameContext != null ? ParserUtils.extractText(fieldNameContext) : "*"; - - if (fieldNameContext != null && fieldNameContext.value != null && fieldNameContext.value.getType() == KqlBaseParser.QUOTED_STRING) { - return isFieldMapped(fieldNamePattern) ? 
List.of(tuple(fieldNamePattern, getFieldType(fieldNamePattern))) : List.of(); - } - - return getMatchingFieldNames(fieldNamePattern).stream() - .map(fieldName -> tuple(fieldName, getFieldType(fieldName))) - .filter(searchableFieldFilter.and(Predicate.not(ignoredFieldFilter))) - .collect(Collectors.toList()); - } - - public boolean isCaseSensitive() { - // TODO: implementation - return false; - } - - public ZoneId timeZone() { - return null; - } - - public static boolean isRuntimeField(MappedFieldType fieldType) { - return fieldType instanceof AbstractScriptFieldType; - } - - public static boolean isDateField(MappedFieldType fieldType) { - return fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE); - } - - public static boolean isKeywordField(MappedFieldType fieldType) { - return fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE); - } -} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java new file mode 100644 index 0000000000000..5f88080fb3ed4 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingContext.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.parser; + +import org.elasticsearch.index.mapper.AbstractScriptFieldType; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryRewriteContext; + +import java.time.ZoneId; +import java.util.List; +import java.util.Set; + +public class KqlParsingContext { + + private static final List IGNORED_METADATA_FIELDS = List.of( + "_seq_no", + "_index_mode", + "_routing", + "_ignored", + "_nested_path", + "_field_names" + ); + + public static Builder builder(QueryRewriteContext queryRewriteContext) { + return new Builder(queryRewriteContext); + } + + private QueryRewriteContext queryRewriteContext; + private final boolean caseInsensitive; + private final ZoneId timeZone; + private final String defaultField; + + public KqlParsingContext(QueryRewriteContext queryRewriteContext, boolean caseInsensitive, ZoneId timeZone, String defaultField) { + this.queryRewriteContext = queryRewriteContext; + this.caseInsensitive = caseInsensitive; + this.timeZone = timeZone; + this.defaultField = defaultField; + } + + public boolean caseInsensitive() { + return caseInsensitive; + } + + public ZoneId timeZone() { + return timeZone; + } + + public String defaultField() { + return defaultField; + } + + public Set resolveFieldNames(String fieldNamePattern) { + assert fieldNamePattern != null && fieldNamePattern.isEmpty() == false : "fieldNamePattern cannot be null or empty"; + return queryRewriteContext.getMatchingFieldNames(fieldNamePattern); + } + + public Set resolveDefaultFieldNames() { + return resolveFieldNames(defaultField); + } + + public MappedFieldType fieldType(String fieldName) { + return queryRewriteContext.getFieldType(fieldName); + } + + public static boolean isRuntimeField(MappedFieldType fieldType) { + return fieldType instanceof AbstractScriptFieldType; + } + + public static 
boolean isDateField(MappedFieldType fieldType) { + return fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE); + } + + public static boolean isKeywordField(MappedFieldType fieldType) { + return fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE); + } + + public static boolean isSearchableField(String fieldName, MappedFieldType fieldType) { + return IGNORED_METADATA_FIELDS.contains(fieldName) == false && fieldType.isSearchable(); + } + + public boolean isSearchableField(String fieldName) { + return isSearchableField(fieldName, fieldType(fieldName)); + } + + public static class Builder { + private final QueryRewriteContext queryRewriteContext; + private boolean caseInsensitive = true; + private ZoneId timeZone = null; + private String defaultField = null; + + private Builder(QueryRewriteContext queryRewriteContext) { + this.queryRewriteContext = queryRewriteContext; + } + + public KqlParsingContext build() { + return new KqlParsingContext(queryRewriteContext, caseInsensitive, timeZone, defaultField); + } + + public Builder caseInsensitive(boolean caseInsensitive) { + this.caseInsensitive = caseInsensitive; + return this; + } + + public Builder timeZone(ZoneId timeZone) { + this.timeZone = timeZone; + return this; + } + + public Builder defaultField(String defaultField) { + this.defaultField = defaultField; + return this; + } + } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java index f996a953ea7f7..3319d920a88ee 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java @@ -12,11 +12,11 @@ import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.ParseTreeVisitor; import org.antlr.v4.runtime.tree.TerminalNode; -import org.apache.logging.log4j.util.Strings; import 
org.apache.lucene.queryparser.classic.QueryParser; import java.util.ArrayList; import java.util.List; +import java.util.Locale; /** * Utility class for parsing and processing KQL expressions. @@ -211,15 +211,15 @@ private static boolean isEscapedKeywordSequence(String input, int startIndex) { if (startIndex + 1 >= input.length()) { return false; } - String remaining = Strings.toRootLowerCase(input.substring(startIndex)); + String remaining = input.substring(startIndex).toLowerCase(Locale.ROOT); return remaining.startsWith("and") || remaining.startsWith("or") || remaining.startsWith("not"); } private static String handleKeywordSequence(String input, int startIndex) { String remaining = input.substring(startIndex); - if (Strings.toRootLowerCase(remaining).startsWith("and")) return remaining.substring(0, 3); - if (Strings.toRootLowerCase(remaining).startsWith("or")) return remaining.substring(0, 2); - if (Strings.toRootLowerCase(remaining).startsWith("not")) return remaining.substring(0, 3); + if (remaining.toLowerCase(Locale.ROOT).startsWith("and")) return remaining.substring(0, 3); + if (remaining.toLowerCase(Locale.ROOT).startsWith("or")) return remaining.substring(0, 2); + if (remaining.toLowerCase(Locale.ROOT).startsWith("not")) return remaining.substring(0, 3); return ""; } diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java new file mode 100644 index 0000000000000..5dff9126b6be4 --- /dev/null +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.kql.parser.KqlParser; +import org.elasticsearch.xpack.kql.parser.KqlParsingContext; + +import java.io.IOException; +import java.time.ZoneId; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class KqlQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "kql"; + public static final ParseField QUERY_FIELD = new ParseField("query"); + private static final ParseField CASE_INSENSITIVE_FIELD = new ParseField("case_insensitive"); + private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); + private static final ParseField DEFAULT_FIELD_FIELD = new ParseField("default_field"); + + private static final Logger log = LogManager.getLogger(KqlQueryBuilder.class); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, a -> { + KqlQueryBuilder kqlQuery = new KqlQueryBuilder((String) a[0]); + + if 
(a[1] != null) { + kqlQuery.caseInsensitive((Boolean) a[1]); + } + + if (a[2] != null) { + kqlQuery.timeZone((String) a[2]); + } + + if (a[3] != null) { + kqlQuery.defaultField((String) a[3]); + } + + return kqlQuery; + }); + + static { + PARSER.declareString(constructorArg(), QUERY_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), CASE_INSENSITIVE_FIELD); + PARSER.declareString(optionalConstructorArg(), TIME_ZONE_FIELD); + PARSER.declareString(optionalConstructorArg(), DEFAULT_FIELD_FIELD); + declareStandardFields(PARSER); + } + + private final String query; + private boolean caseInsensitive = true; + private ZoneId timeZone; + private String defaultField; + + public KqlQueryBuilder(String query) { + this.query = Objects.requireNonNull(query, "query can not be null"); + } + + public KqlQueryBuilder(StreamInput in) throws IOException { + super(in); + query = in.readString(); + caseInsensitive = in.readBoolean(); + timeZone = in.readOptionalZoneId(); + defaultField = in.readOptionalString(); + } + + public static KqlQueryBuilder fromXContent(XContentParser parser) { + try { + return PARSER.apply(parser, null); + } catch (IllegalArgumentException e) { + throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e); + } + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.KQL_QUERY_ADDED; + } + + public String queryString() { + return query; + } + + public boolean caseInsensitive() { + return caseInsensitive; + } + + public KqlQueryBuilder caseInsensitive(boolean caseInsensitive) { + this.caseInsensitive = caseInsensitive; + return this; + } + + public ZoneId timeZone() { + return timeZone; + } + + public KqlQueryBuilder timeZone(String timeZone) { + this.timeZone = timeZone != null ? 
ZoneId.of(timeZone) : null; + return this; + } + + public String defaultField() { + return defaultField; + } + + public KqlQueryBuilder defaultField(String defaultField) { + this.defaultField = defaultField; + return this; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + { + builder.field(QUERY_FIELD.getPreferredName(), query); + builder.field(CASE_INSENSITIVE_FIELD.getPreferredName(), caseInsensitive); + + if (defaultField != null) { + builder.field(DEFAULT_FIELD_FIELD.getPreferredName(), defaultField); + } + + if (timeZone != null) { + builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId()); + } + + boostAndQueryNameToXContent(builder); + } + builder.endObject(); + } + + @Override + protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { + KqlParser parser = new KqlParser(); + QueryBuilder rewrittenQuery = parser.parseKqlQuery(query, createKqlParserContext(context)); + + log.trace(() -> Strings.format("KQL query %s translated to Query DSL: %s", query, Strings.toString(rewrittenQuery))); + + return rewrittenQuery; + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + throw new IllegalStateException("The query should have been rewritten"); + } + + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(query); + out.writeBoolean(caseInsensitive); + out.writeOptionalZoneId(timeZone); + out.writeOptionalString(defaultField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected int doHashCode() { + return Objects.hash(query, caseInsensitive, timeZone, defaultField); + } + + @Override + protected boolean doEquals(KqlQueryBuilder other) { + return Objects.equals(query, other.query) + && Objects.equals(timeZone, other.timeZone) + && Objects.equals(defaultField, other.defaultField) + && caseInsensitive == 
other.caseInsensitive; + } + + private KqlParsingContext createKqlParserContext(QueryRewriteContext queryRewriteContext) { + return KqlParsingContext.builder(queryRewriteContext) + .caseInsensitive(caseInsensitive) + .timeZone(timeZone) + .defaultField(defaultField) + .build(); + } +} diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java index 88c63e9a2585b..ac06a96d49eb4 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/AbstractKqlParserTestCase.java @@ -16,7 +16,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.test.AbstractBuilderTestCase; @@ -111,9 +110,8 @@ protected List searchableFields(String fieldNamePattern) { protected QueryBuilder parseKqlQuery(String kqlQuery) { KqlParser parser = new KqlParser(); - SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); - - return parser.parseKqlQuery(kqlQuery, searchExecutionContext); + KqlParsingContext kqlParserContext = KqlParsingContext.builder(createQueryRewriteContext()).build(); + return parser.parseKqlQuery(kqlQuery, kqlParserContext); } protected static void assertMultiMatchQuery(QueryBuilder query, String expectedValue, MultiMatchQueryBuilder.Type expectedType) { diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java new file mode 100644 
index 0000000000000..2bc23c7d457dd --- /dev/null +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java @@ -0,0 +1,286 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.kql.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.core.Strings; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryStringQueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.xpack.kql.KqlPlugin; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class KqlQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(KqlPlugin.class); + } + + @Override + protected KqlQueryBuilder doCreateTestQueryBuilder() { + KqlQueryBuilder kqlQueryBuilder = new KqlQueryBuilder(generateRandomKqlQuery()); + + if (randomBoolean()) { + kqlQueryBuilder.caseInsensitive(randomBoolean()); + } + + if (randomBoolean()) { + kqlQueryBuilder.timeZone(randomTimeZone().getID()); 
+ } + + if (randomBoolean()) { + kqlQueryBuilder.defaultField(randomFrom("*", "mapped_*", KEYWORD_FIELD_NAME, TEXT_FIELD_NAME)); + } + + return kqlQueryBuilder; + } + + @Override + public KqlQueryBuilder mutateInstance(KqlQueryBuilder instance) throws IOException { + if (randomBoolean()) { + // Change name or boost. + return super.mutateInstance(instance); + } + + KqlQueryBuilder kqlQueryBuilder = new KqlQueryBuilder(randomValueOtherThan(instance.queryString(), this::generateRandomKqlQuery)) + .caseInsensitive(instance.caseInsensitive()) + .timeZone(instance.timeZone() != null ? instance.timeZone().getId() : null) + .defaultField(instance.defaultField()); + + if (kqlQueryBuilder.queryString().equals(instance.queryString()) == false) { + return kqlQueryBuilder; + } + + switch (randomInt() % 3) { + case 0 -> { + kqlQueryBuilder.caseInsensitive(instance.caseInsensitive() == false); + } + case 1 -> { + if (randomBoolean() && instance.defaultField() != null) { + kqlQueryBuilder.defaultField(null); + } else { + kqlQueryBuilder.defaultField( + randomValueOtherThan( + instance.defaultField(), + () -> randomFrom("*", "mapped_*", KEYWORD_FIELD_NAME, TEXT_FIELD_NAME) + ) + ); + } + } + default -> { + if (randomBoolean() && instance.timeZone() != null) { + kqlQueryBuilder.timeZone(null); + } else if (instance.timeZone() != null) { + kqlQueryBuilder.timeZone(randomValueOtherThan(instance.timeZone().getId(), () -> randomTimeZone().getID())); + } else { + kqlQueryBuilder.timeZone(randomTimeZone().getID()); + } + } + } + ; + + return kqlQueryBuilder; + } + + @Override + protected void doAssertLuceneQuery(KqlQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { + // We're not validating the query content here because it would be too complex. + // Instead, we use ad-hoc parser tests with a predictable output. 
+ } + + private String generateRandomKqlQuery() { + return Stream.generate(() -> { + Stream terms = Stream.generate( + () -> randomValueOtherThanMany(s -> s.toLowerCase(Locale.ROOT).contains("now"), () -> randomAlphaOfLengthBetween(4, 10)) + ).limit(randomIntBetween(1, 5)); + + String subQuery = terms.collect(Collectors.joining(" ")); + + if (randomBoolean() && subQuery.isEmpty() == false) { + String operator = randomFrom(":", "<", "<=", ">", ">="); + String fieldName = randomFrom(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME); + if (operator.equals(":")) { + subQuery = switch (randomFrom(0, 2)) { + case 0 -> subQuery; + case 1 -> '(' + subQuery + ')'; + default -> '"' + subQuery + '"'; + }; + } else { + fieldName = randomFrom(KEYWORD_FIELD_NAME, TEXT_FIELD_NAME, DOUBLE_FIELD_NAME, INT_FIELD_NAME); + if (List.of(DOUBLE_FIELD_NAME, INT_FIELD_NAME).contains(fieldName)) { + subQuery = String.valueOf(randomDouble()); + } + subQuery = randomBoolean() ? '"' + subQuery + '"' : subQuery; + } + + subQuery = fieldName + operator + subQuery; + } + + if (randomBoolean() && subQuery.isEmpty() == false) { + subQuery = '(' + subQuery + ')'; + } + + if (randomBoolean()) { + subQuery = "NOT " + subQuery; + } + + if (randomBoolean() && subQuery.isEmpty() == false) { + subQuery = '(' + subQuery + ')'; + } + + return subQuery; + }).limit(randomIntBetween(0, 5)).collect(Collectors.joining(randomFrom(" OR ", " AND "))); + } + + @Override + public void testMustRewrite() throws IOException { + SearchExecutionContext context = createSearchExecutionContext(); + context.setAllowUnmappedFields(true); + KqlQueryBuilder queryBuilder = createTestQueryBuilder(); + IllegalStateException e = assertThrows(IllegalStateException.class, () -> queryBuilder.toQuery(context)); + assertThat(e.getMessage(), Matchers.containsString("The query should have been rewritten")); + } + + public void testCaseInsensitiveWildcardQuery() throws IOException { + QueryRewriteContext queryRewriteContext = 
createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + for (boolean caseInsensitive : List.of(true, false)) { + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(KEYWORD_FIELD_NAME + ": foo*"); + // Check case case_insensitive is true by default + assertThat(kqlQuery.caseInsensitive(), equalTo(true)); + + kqlQuery.caseInsensitive(caseInsensitive); + + ; + assertThat( + asInstanceOf(WildcardQueryBuilder.class, rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext)) + .caseInsensitive(), + equalTo(caseInsensitive) + ); + } + } + + public void testCaseInsensitiveTermQuery() throws IOException { + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + for (boolean caseInsensitive : List.of(true, false)) { + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(KEYWORD_FIELD_NAME + ": foo"); + // Check case case_insensitive is true by default + assertThat(kqlQuery.caseInsensitive(), equalTo(true)); + + kqlQuery.caseInsensitive(caseInsensitive); + + assertThat( + asInstanceOf(TermQueryBuilder.class, rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext)).caseInsensitive(), + equalTo(caseInsensitive) + ); + } + } + + public void testTimeZone() throws IOException { + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + String timeZone = randomTimeZone().getID(); + + for (String operator : List.of(":", "<", "<=", ">", ">=")) { + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(Strings.format("%s %s %s", DATE_FIELD_NAME, operator, "2018-03-28")); + assertThat(kqlQuery.timeZone(), nullValue()); // timeZone is not set by default. 
+ kqlQuery.timeZone(timeZone); + + assertThat( + asInstanceOf(RangeQueryBuilder.class, rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext)).timeZone(), + equalTo(timeZone) + ); + } + } + + public void testDefaultFieldWildcardQuery() throws IOException { + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(Strings.format("foo*")); + assertThat(kqlQuery.defaultField(), nullValue()); // default_field is not set by default. + + kqlQuery.defaultField(TEXT_FIELD_NAME); + + assertThat( + asInstanceOf(QueryStringQueryBuilder.class, rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext)).defaultField(), + equalTo(TEXT_FIELD_NAME) + ); + } + + public void testDefaultFieldMatchQuery() throws IOException { + + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + { + // Using a specific field name + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(Strings.format("foo")); + assertThat(kqlQuery.defaultField(), nullValue()); // default_field is not set by default. + + kqlQuery.defaultField(TEXT_FIELD_NAME); + MultiMatchQueryBuilder rewritenQuery = asInstanceOf( + MultiMatchQueryBuilder.class, + rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext) + ); + assertThat(rewritenQuery.fields().keySet(), contains(TEXT_FIELD_NAME)); + } + + { + // Using a pattern for as the field name + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(Strings.format("foo")); + assertThat(kqlQuery.defaultField(), nullValue()); // default_field is not set by default. 
+ + kqlQuery.defaultField("mapped_object.*"); + MultiMatchQueryBuilder rewritenQuery = asInstanceOf( + MultiMatchQueryBuilder.class, + rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext) + ); + assertThat(rewritenQuery.fields().keySet(), contains("mapped_object.mapped_date", "mapped_object.mapped_int")); + } + } + + public void testQueryNameIsPreserved() throws IOException { + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(generateRandomKqlQuery()).queryName(randomIdentifier()); + QueryBuilder rewrittenQuery = rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext); + assertThat(rewrittenQuery.queryName(), equalTo(kqlQuery.queryName())); + } + + public void testQueryBoostIsPreserved() throws IOException { + QueryRewriteContext queryRewriteContext = createQueryRewriteContext(); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + + KqlQueryBuilder kqlQuery = new KqlQueryBuilder(generateRandomKqlQuery()).boost(randomFloatBetween(0, Float.MAX_VALUE, true)); + QueryBuilder rewrittenQuery = rewriteQuery(kqlQuery, queryRewriteContext, searchExecutionContext); + assertThat(rewrittenQuery.boost(), equalTo(kqlQuery.boost())); + } +} diff --git a/x-pack/plugin/kql/src/yamlRestTest/java/org/elasticsearch/xpack/kql/KqlRestIT.java b/x-pack/plugin/kql/src/yamlRestTest/java/org/elasticsearch/xpack/kql/KqlRestIT.java new file mode 100644 index 0000000000000..35df46b0fdcbb --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/java/org/elasticsearch/xpack/kql/KqlRestIT.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.kql; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +public class KqlRestIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .setting("xpack.security.enabled", "false") + .setting("xpack.security.http.ssl.enabled", "false") + .distribution(DistributionType.DEFAULT) + .build(); + + public KqlRestIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } +} diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml new file mode 100644 index 0000000000000..bb59c6a48b612 --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/10_kql_basic_query.yml @@ -0,0 +1,212 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: capabilities + reason: KQL query is not available + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + date_field: + type: date + text_field: + type: text + keyword_field: + type: keyword + integer_field: + type: integer + double_field: + type: double + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "text_field": "foo bar", 
"integer_field": 1, "double_field": 3.5, "date_field": "2010-03-06T14:15:00", "keyword_field": "foo bar" } + { "index" : { "_id": "doc-42" } } + { "text_field": "foo baz", "integer_field": 2, "double_field": 18.9, "date_field": "2018-03-28T20:30:00", "keyword_field": "foo baz" } + +--- +"KQL match all queries": + # KQL empty query are supposed to match all. + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "" } } + } + - match: { hits.total: 2 } + + # Using the *:* syntax + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "*" } } + } + - match: { hits.total: 2 } + + # Using the *:* syntax + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "*:*" } } + } + - match: { hits.total: 2 } + +--- +"KQL match term queries (no field specified)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "bar" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "foo bar" } } + } + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "doc-1" } + + # KQL does not match on the _id field when no field is specified. 
+ - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "doc-42" } } + } + - match: { hits.total: 0 } + +--- +"KQL match multiple terms queries (no field specified)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "foo bar" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "(foo bar)" } } + } + - match: { hits.total: 2 } + +--- +"KQL match phrase queries (no field specified)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "\"foo bar\"" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + +--- +"KQL match number queries (no field specified)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "2" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "3.5" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + +--- +"KQL match multiple terms queries (no matches)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "no match" } } + } + - match: { hits.total: 0 } + + +--- +"KQL boolean queries": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: foo AND integer_field > 1" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: baz OR keyword_field: foo bar" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + 
rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "NOT text_field: baz" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/20_kql_match_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/20_kql_match_query.yml new file mode 100644 index 0000000000000..2e40c73ecf829 --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/20_kql_match_query.yml @@ -0,0 +1,266 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: capabilities + reason: KQL query is not available + + - requires: + "test_runner_features": "contains" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + date_field: + type: date + text_field: + type: text + keyword_field: + type: keyword + integer_field: + type: integer + double_field: + type: double + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "text_field": "foo bar", "integer_field": 1, "double_field": 3.5, "date_field": "2010-03-06T14:15:00", "keyword_field": "foo bar" } + { "index" : { "_id": "doc-42" } } + { "text_field": "foo baz", "integer_field": 2, "double_field": 18.9, "date_field": "2018-03-28T20:30:00", "keyword_field": "foo baz" } + + +--- +"KQL match term queries (text field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:bar" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: foo bar" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: (foo bar)" 
} } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: \"foo bar\"" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field: bar*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + +--- +"KQL match term queries (integer field)": + - do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field: foo" } } + } + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "query_shard_exception" } + - match: { error.root_cause.0.reason: "failed to create query: For input string: \"foo\"" } + - contains: { error.root_cause.0.stack_trace: "Caused by: java.lang.NumberFormatException: For input string: \"foo\"" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field: 2" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field: \"2\"" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + +--- +"KQL match term queries (double field)": + - do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field: foo" } } + } + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "query_shard_exception" } + - match: { error.root_cause.0.reason: "failed to create query: For input string: \"foo\"" } + - contains: { error.root_cause.0.stack_trace: "Caused by: java.lang.NumberFormatException: For input 
string: \"foo\"" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field: 18.9" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field: \"18.9\"" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + +--- +"KQL match term queries (keyword field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field:foo bar" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field: \"foo bar\"" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field: foo ba*" } } + } + - match: { hits.total: 2 } + + +--- +"KQL match term queries (date field)": + - do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field: foo" } } + } + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "parse_exception" } + - contains: { error.root_cause.0.reason: "failed to parse date field [foo]" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field: 2010-03-06" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field: now" } } + } + - match: { hits.total: 0 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + 
{ + "query": { "kql": { "query": "date_field: now/1d" } } + } + - match: { hits.total: 0 } + +--- +"KQL match term queries (search by id)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "_id:doc-1" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/30_kql_range_query.yml b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/30_kql_range_query.yml new file mode 100644 index 0000000000000..e03fd41306ba9 --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/30_kql_range_query.yml @@ -0,0 +1,343 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: capabilities + reason: KQL query is not available + + - requires: + "test_runner_features": "contains" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + date_field: + type: date + text_field: + type: text + keyword_field: + type: keyword + integer_field: + type: integer + double_field: + type: double + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "text_field": "bar", "integer_field": 1, "double_field": 3.5, "date_field": "2010-03-06T14:15:00", "keyword_field": "foo bar" } + { "index" : { "_id": "doc-42" } } + { "text_field": "baz", "integer_field": 2, "double_field": 18.9, "date_field": "2018-03-28T20:30:00", "keyword_field": "foo baz" } + + +--- +"KQL match term queries (text field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field < baz" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field <= baz" } } + } + - 
match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field > bar" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field >= bar" } } + } + - match: { hits.total: 2 } + + +--- +"KQL match term queries (integer field)": + - do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field < foo" } } + } + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "query_shard_exception" } + - match: { error.root_cause.0.reason: "failed to create query: For input string: \"foo\"" } + - contains: { error.root_cause.0.stack_trace: "Caused by: java.lang.NumberFormatException: For input string: \"foo\"" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field >= 1" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field > 1" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field <= 2" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field < 2" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + +--- +"KQL match term queries (double field)": + - do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field < foo" } } + } + - match: { error.type: 
"search_phase_execution_exception" } + - match: { error.root_cause.0.type: "query_shard_exception" } + - match: { error.root_cause.0.reason: "failed to create query: For input string: \"foo\"" } + - contains: { error.root_cause.0.stack_trace: "Caused by: java.lang.NumberFormatException: For input string: \"foo\"" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field >= 3.5" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field > 3.5" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field <= 18.9" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field < 18.9" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + +--- +"KQL match term queries (keyword field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field < foo baz" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field <= foo baz" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field > foo bar" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field >= foo bar" } } + } + - match: { hits.total: 2 } + + +--- +"KQL match term queries (date field)": + - 
do: + catch: bad_request + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field: foo" } } + } + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "parse_exception" } + - contains: { error.root_cause.0.reason: "failed to parse date field [foo]" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field < 2018-03-28" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field <= 2018-03-28" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field > 2010-03-06" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-42" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field >= 2010-03-06" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field < now" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field <= now" } } + } + - match: { hits.total: 2 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field > now" } } + } + - match: { hits.total: 0 } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field >= now" } } + } + - match: { hits.total: 0 } diff --git a/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/40_kql_exist_query.yml 
b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/40_kql_exist_query.yml new file mode 100644 index 0000000000000..ca9197d382f64 --- /dev/null +++ b/x-pack/plugin/kql/src/yamlRestTest/resources/rest-api-spec/test/kql/40_kql_exist_query.yml @@ -0,0 +1,182 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ kql_query ] + test_runner_features: capabilities + reason: KQL query is not available + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + date_field: + type: date + text_field: + type: text + keyword_field: + type: keyword + integer_field: + type: integer + double_field: + type: double + + - do: + bulk: + index: test-index + refresh: true + body: | + { "index" : { "_id": "doc-1" } } + { "text_field": "foo bar", "integer_field": 1, "double_field": 3.5, "date_field": "2010-03-06T14:15:00", "keyword_field": "foo bar" } + { "index" : { "_id": "doc-42" } } + { "another_field": "foo"} + +--- +"KQL exists queries - Existing field": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": 
"keyword_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_*:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "*_field:*" } } + } + - match: { hits.total: 2 } + +--- +"KQL exists queries (existing field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "text_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "integer_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "double_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "date_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "keyword_field:*" } } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "doc-1" } + +--- +"KQL exists queries (non-existing field)": + - do: + search: + index: test-index + rest_total_hits_as_int: true + body: > + { + "query": { "kql": { "query": "non_existing_field:*" } } + } + - match: { hits.total: 0 } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java index 
5c385d5920428..62e1eef3e0e97 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java @@ -8,17 +8,22 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.monitor.metrics.IndexModeStatsActionType; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackFeatures; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; @@ -28,13 +33,17 @@ public class LogsDBUsageTransportAction extends XPackUsageFeatureTransportAction { private final ClusterService clusterService; + private final FeatureService featureService; + private final Client client; @Inject public LogsDBUsageTransportAction( TransportService transportService, ClusterService clusterService, + FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, + Client client, IndexNameExpressionResolver indexNameExpressionResolver ) { super( @@ -46,6 +55,8 @@ public LogsDBUsageTransportAction( 
indexNameExpressionResolver ); this.clusterService = clusterService; + this.featureService = featureService; + this.client = client; } @Override @@ -66,8 +77,28 @@ protected void masterOperation( } } final boolean enabled = LogsDBPlugin.CLUSTER_LOGSDB_ENABLED.get(clusterService.getSettings()); - listener.onResponse( - new XPackUsageFeatureResponse(new LogsDBFeatureSetUsage(true, enabled, numIndices, numIndicesWithSyntheticSources)) - ); + if (featureService.clusterHasFeature(state, XPackFeatures.LOGSDB_TELMETRY_STATS)) { + final DiscoveryNode[] nodes = state.nodes().getDataNodes().values().toArray(DiscoveryNode[]::new); + final var statsRequest = new IndexModeStatsActionType.StatsRequest(nodes); + final int finalNumIndices = numIndices; + final int finalNumIndicesWithSyntheticSources = numIndicesWithSyntheticSources; + client.execute(IndexModeStatsActionType.TYPE, statsRequest, listener.map(statsResponse -> { + final var indexStats = statsResponse.stats().get(IndexMode.LOGSDB); + return new XPackUsageFeatureResponse( + new LogsDBFeatureSetUsage( + true, + enabled, + finalNumIndices, + finalNumIndicesWithSyntheticSources, + indexStats.numDocs(), + indexStats.numBytes() + ) + ); + })); + } else { + listener.onResponse( + new XPackUsageFeatureResponse(new LogsDBFeatureSetUsage(true, enabled, numIndices, numIndicesWithSyntheticSources, 0L, 0L)) + ); + } } } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index e7572d6a646e1..e87f10ec19916 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -101,6 +101,20 @@ boolean newIndexHasSyntheticSourceUsage( try { var tmpIndexMetadata = 
buildIndexMetadataForMapperService(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings); + var indexMode = tmpIndexMetadata.getIndexMode(); + if (SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.exists(tmpIndexMetadata.getSettings()) + || indexMode == IndexMode.LOGSDB + || indexMode == IndexMode.TIME_SERIES) { + // In case when index mode is tsdb or logsdb and only _source.mode mapping attribute is specified, then the default + // could be wrong. However, it doesn't really matter, because if the _source.mode mapping attribute is set to stored, + // then configuring the index.mapping.source.mode setting to stored has no effect. Additionally _source.mode can't be set + // to disabled, because that isn't allowed with logsdb/tsdb. In other words setting index.mapping.source.mode setting to + // stored when _source.mode mapping attribute is stored is fine as it has no effect, but avoids creating MapperService. + var sourceMode = SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(tmpIndexMetadata.getSettings()); + return sourceMode == SourceFieldMapper.Mode.SYNTHETIC; + } + + // TODO: remove this when _source.mode attribute has been removed: try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata)) { // combinedTemplateMappings can be null when creating system indices // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. @@ -112,7 +126,8 @@ boolean newIndexHasSyntheticSourceUsage( } } catch (AssertionError | Exception e) { // In case invalid mappings or setting are provided, then mapper service creation can fail. - // In that case it is ok to return false here. The index creation will fail anyway later, so need to fallback to stored source. + // In that case it is ok to return false here. The index creation will fail anyway later, so no need to fallback to stored + // source. 
LOGGER.info(() -> Strings.format("unable to create mapper service for index [%s]", indexName), e); return false; } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 2ab77b38b3373..2d8723a0d8c25 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.time.Instant; import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.common.settings.Settings.builder; import static org.hamcrest.Matchers.equalTo; @@ -35,6 +36,7 @@ public class SyntheticSourceIndexSettingsProviderTests extends ESTestCase { private SyntheticSourceLicenseService syntheticSourceLicenseService; private SyntheticSourceIndexSettingsProvider provider; + private final AtomicInteger newMapperServiceCounter = new AtomicInteger(); private static LogsdbIndexModeSettingsProvider getLogsdbIndexModeSettingsProvider(boolean enabled) { return new LogsdbIndexModeSettingsProvider(Settings.builder().put("cluster.logsdb.enabled", enabled).build()); @@ -49,11 +51,11 @@ public void setup() { syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY); syntheticSourceLicenseService.setLicenseState(licenseState); - provider = new SyntheticSourceIndexSettingsProvider( - syntheticSourceLicenseService, - im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(false) - ); + provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { + newMapperServiceCounter.incrementAndGet(); + return 
MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); + }, getLogsdbIndexModeSettingsProvider(false)); + newMapperServiceCounter.set(0); } public void testNewIndexHasSyntheticSourceUsage() throws IOException { @@ -77,6 +79,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { """; boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); + assertThat(newMapperServiceCounter.get(), equalTo(1)); } { String mapping; @@ -110,6 +113,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); + assertThat(newMapperServiceCounter.get(), equalTo(2)); } } @@ -152,15 +156,18 @@ public void testNewIndexHasSyntheticSourceUsageLogsdbIndex() throws IOException Settings settings = Settings.builder().put("index.mode", "logsdb").build(); boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); + assertThat(newMapperServiceCounter.get(), equalTo(0)); } { Settings settings = Settings.builder().put("index.mode", "logsdb").build(); boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of()); assertTrue(result); + assertThat(newMapperServiceCounter.get(), equalTo(0)); } { boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, Settings.EMPTY, List.of()); assertFalse(result); + assertThat(newMapperServiceCounter.get(), equalTo(1)); } { boolean result = provider.newIndexHasSyntheticSourceUsage( @@ -170,6 +177,7 @@ public void testNewIndexHasSyntheticSourceUsageLogsdbIndex() throws IOException List.of(new CompressedXContent(mapping)) ); assertFalse(result); + assertThat(newMapperServiceCounter.get(), equalTo(2)); 
} } @@ -234,6 +242,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep """; boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); + assertThat(newMapperServiceCounter.get(), equalTo(1)); } { String mapping = """ @@ -249,6 +258,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep """; boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); + assertThat(newMapperServiceCounter.get(), equalTo(2)); } } @@ -278,6 +288,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws List.of() ); assertThat(result.size(), equalTo(0)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); syntheticSourceLicenseService.setSyntheticSourceFallback(true); result = provider.getAdditionalIndexSettings( @@ -291,6 +302,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws ); assertThat(result.size(), equalTo(1)); assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), @@ -303,6 +315,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws ); assertThat(result.size(), equalTo(1)); assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), @@ -315,6 +328,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSource() throws ); assertThat(result.size(), equalTo(1)); assertEquals(SourceFieldMapper.Mode.STORED, 
SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); } public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch() throws IOException { @@ -347,6 +361,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( List.of() ); assertThat(result.size(), equalTo(0)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); dataStreamName = "logs-app1-0"; mb = Metadata.builder( @@ -371,6 +386,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( ); assertThat(result.size(), equalTo(1)); assertEquals(SourceFieldMapper.Mode.STORED, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(result)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), @@ -382,5 +398,6 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( List.of() ); assertThat(result.size(), equalTo(0)); + assertThat(newMapperServiceCounter.get(), equalTo(0)); } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index e92aff74be463..2e08b845f6593 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ml.packageloader.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; @@ -56,10 +58,12 @@ */ final 
class ModelLoaderUtils { + private static final Logger logger = LogManager.getLogger(ModelLoaderUtils.class); + public static String METADATA_FILE_EXTENSION = ".metadata.json"; public static String MODEL_FILE_EXTENSION = ".pt"; - private static ByteSizeValue VOCABULARY_SIZE_LIMIT = new ByteSizeValue(20, ByteSizeUnit.MB); + private static final ByteSizeValue VOCABULARY_SIZE_LIMIT = new ByteSizeValue(20, ByteSizeUnit.MB); private static final String VOCABULARY = "vocabulary"; private static final String MERGES = "merges"; private static final String SCORES = "scores"; @@ -83,6 +87,7 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {} private final AtomicInteger currentPart; private final int lastPartNumber; private final byte[] buf; + private final RequestRange range; // TODO debug only HttpStreamChunker(URI uri, RequestRange range, int chunkSize) { var inputStream = getHttpOrHttpsInputStream(uri, range); @@ -91,6 +96,7 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {} this.lastPartNumber = range.startPart() + range.numParts(); this.currentPart = new AtomicInteger(range.startPart()); this.buf = new byte[chunkSize]; + this.range = range; } // This ctor exists for testing purposes only. @@ -100,6 +106,7 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {} this.lastPartNumber = range.startPart() + range.numParts(); this.currentPart = new AtomicInteger(range.startPart()); this.buf = new byte[chunkSize]; + this.range = range; } public boolean hasNext() { @@ -113,6 +120,7 @@ public BytesAndPartIndex next() throws IOException { int read = inputStream.read(buf, bytesRead, chunkSize - bytesRead); // EOF?? 
if (read == -1) { + logger.debug("end of stream, " + bytesRead + " bytes read"); break; } bytesRead += read; @@ -122,6 +130,7 @@ public BytesAndPartIndex next() throws IOException { totalBytesRead.addAndGet(bytesRead); return new BytesAndPartIndex(new BytesArray(buf, 0, bytesRead), currentPart.getAndIncrement()); } else { + logger.warn("Empty part in range " + range + ", current part=" + currentPart.get() + ", last part=" + lastPartNumber); return new BytesAndPartIndex(BytesArray.EMPTY, currentPart.get()); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index 5c9711a6e5d8b..cc997b3804e6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -20,7 +20,6 @@ import org.elasticsearch.search.aggregations.metrics.GeoCentroid; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.metrics.Percentiles; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -408,8 +407,8 @@ private boolean processGeoCentroid(GeoCentroid agg) { } private boolean processPercentiles(Percentiles percentiles) { - Iterator percentileIterator = percentiles.iterator(); - boolean aggregationAdded = addMetricIfFinite(percentiles.getName(), percentileIterator.next().value()); + var percentileIterator = percentiles.iterator(); + var aggregationAdded = percentileIterator.hasNext() && addMetricIfFinite(percentiles.getName(), 
percentileIterator.next().value()); if (percentileIterator.hasNext()) { throw new IllegalArgumentException("Multi-percentile aggregation [" + percentiles.getName() + "] is not supported"); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java index f8755b282c6a1..239f392e9f905 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TrainedModelValidatorTests.java @@ -64,37 +64,6 @@ public void testValidateMinimumVersion() { { ClusterState state = mock(ClusterState.class); - final ModelPackageConfig packageConfigCurrent = new ModelPackageConfig.Builder( - ModelPackageConfigTests.randomModulePackageConfig() - ).setMinimumVersion(MlConfigVersion.CURRENT.toString()).build(); - - DiscoveryNode node = DiscoveryNodeUtils.create( - "node1", - new TransportAddress(InetAddress.getLoopbackAddress(), 9300), - Version.V_8_7_0 - ); - - DiscoveryNodes nodes = DiscoveryNodes.builder().add(node).build(); - - when(state.nodes()).thenReturn(nodes); - - Exception e = expectThrows( - ActionRequestValidationException.class, - () -> TrainedModelValidator.validateMinimumVersion(packageConfigCurrent, state) - ); - - assertEquals( - "Validation Failed: 1: The model [" - + packageConfigCurrent.getPackagedModelId() - + "] requires that all nodes have ML config version [" - + MlConfigVersion.CURRENT - + "] or higher;", - e.getMessage() - ); - } - { - ClusterState state = mock(ClusterState.class); - final ModelPackageConfig packageConfigBroken = new ModelPackageConfig.Builder( ModelPackageConfigTests.randomModulePackageConfig() ).setMinimumVersion("_broken_version_").build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java index fc774a4ee3e48..ab1dee3b91236 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java @@ -505,6 +505,15 @@ public void testProcessGivenBucketAndLeafAggregationsButBucketNotInFields() thro {"time":4400,"my_value":4.0,"doc_count":7}""")); } + public void testProcessGivenEmptyPercentiles() throws IOException { + var histogramBuckets = List.of(createHistogramBucket(1000L, 4, List.of(createMax("time", 1000), createPercentiles("my_field")))); + + var json = aggToString(Sets.newHashSet("my_field"), histogramBuckets); + + assertThat(json, equalTo(""" + {"time":1000,"doc_count":4}""")); + } + public void testProcessGivenSinglePercentilesPerHistogram() throws IOException { List histogramBuckets = Arrays.asList( createHistogramBucket(1000L, 4, Arrays.asList(createMax("time", 1000), createPercentiles("my_field", 1.0))), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java index 112a8c80b0483..f7fb82f5d233c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobNodeSelectorTests.java @@ -88,19 +88,6 @@ public void testNodeNameAndVersionForRecentNode() { assertEquals("{_node_name1}{ML config version=10.0.0}", JobNodeSelector.nodeNameAndVersion(node)); } - public void testNodeNameAndVersionForOldNode() { - TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); - Map attributes = Map.of("unrelated", "attribute"); - DiscoveryNode node = 
DiscoveryNodeUtils.builder("_node_id2") - .name("_node_name2") - .address(ta) - .attributes(attributes) - .roles(ROLES_WITH_ML) - .version(VersionInformation.inferVersions(Version.V_8_7_0)) - .build(); - assertEquals("{_node_name2}{ML config version=8.7.0}", JobNodeSelector.nodeNameAndVersion(node)); - } - public void testNodeNameAndMlAttributes() { TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); SortedMap attributes = new TreeMap<>(); diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml index eb5cd6d37af83..ee5145ea42876 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml @@ -95,6 +95,9 @@ template: k8s.statefulset.name: type: keyword ignore_above: 1024 + k8s.cluster.name: + type: keyword + ignore_above: 1024 service.node.name: type: alias path: resource.attributes.service.instance.id @@ -131,6 +134,10 @@ template: host.os.version: type: alias path: resource.attributes.os.version + orchestrator.cluster.name: + type: alias + path: resource.attributes.k8s.cluster.name +# Below are non-ECS fields that may be used by Kibana. kubernetes.deployment.name: type: alias path: resource.attributes.k8s.deployment.name @@ -170,7 +177,6 @@ template: kubernetes.node.hostname: type: alias path: resource.attributes.k8s.node.hostname -# Below are non-ECS fields that may be used by Kibana. 
service.language.name: type: alias path: resource.attributes.telemetry.sdk.language diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml index 63966e601a3cb..95a42b137df52 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml @@ -209,12 +209,13 @@ host.name pass-through: k8s.replicaset.name: myReplicasetName k8s.node.uid: myNodeUid k8s.node.hostname: myNodeHostname + k8s.cluster.name: myClusterName - is_false: errors - do: search: index: logs-generic.otel-default body: - fields: ["kubernetes.container.name", "kubernetes.cronjob.name", "kubernetes.job.name", "kubernetes.statefulset.name", "kubernetes.daemonset.name", "kubernetes.replicaset.name", "kubernetes.node.uid", "kubernetes.node.hostname" ] + fields: ["kubernetes.container.name", "kubernetes.cronjob.name", "kubernetes.job.name", "kubernetes.statefulset.name", "kubernetes.daemonset.name", "kubernetes.replicaset.name", "kubernetes.node.uid", "kubernetes.node.hostname", "orchestrator.cluster.name" ] - length: { hits.hits: 1 } - match: { hits.hits.0.fields.kubernetes\.container\.name : ["myContainerName"] } - match: { hits.hits.0.fields.kubernetes\.cronjob\.name : ["myCronJobName"] } @@ -224,3 +225,4 @@ host.name pass-through: - match: { hits.hits.0.fields.kubernetes\.replicaset\.name : ["myReplicasetName"] } - match: { hits.hits.0.fields.kubernetes\.node\.uid : ["myNodeUid"] } - match: { hits.hits.0.fields.kubernetes\.node\.hostname : ["myNodeHostname"] } + - match: { hits.hits.0.fields.orchestrator\.cluster\.name : ["myClusterName"] } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java index 
fb20f834937d3..df65aac5b79b8 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java @@ -14,13 +14,20 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; +import org.elasticsearch.search.retriever.KnnRetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.StandardRetrieverBuilder; +import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,9 +35,11 @@ import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.function.Predicate; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -183,6 +192,37 @@ public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorCo return null; } + @Override + public RetrieverBuilder toRetriever(SearchSourceBuilder source, Predicate 
clusterSupportsFeature) { + if (false == clusterSupportsFeature.test(RRFRetrieverBuilder.RRF_RETRIEVER_COMPOSITION_SUPPORTED)) { + return null; + } + int totalQueries = source.subSearches().size() + source.knnSearch().size(); + if (totalQueries < 2) { + throw new IllegalArgumentException("[rrf] requires at least 2 sub-queries to be defined"); + } + List retrieverSources = new ArrayList<>(totalQueries); + for (int i = 0; i < source.subSearches().size(); i++) { + RetrieverBuilder standardRetriever = new StandardRetrieverBuilder(source.subSearches().get(i).getQueryBuilder()); + standardRetriever.retrieverName(source.subSearches().get(i).getQueryBuilder().queryName()); + retrieverSources.add(new CompoundRetrieverBuilder.RetrieverSource(standardRetriever, null)); + } + for (int i = 0; i < source.knnSearch().size(); i++) { + KnnSearchBuilder knnSearchBuilder = source.knnSearch().get(i); + RetrieverBuilder knnRetriever = new KnnRetrieverBuilder( + knnSearchBuilder.getField(), + knnSearchBuilder.getQueryVector().asFloatVector(), + knnSearchBuilder.getQueryVectorBuilder(), + knnSearchBuilder.k(), + knnSearchBuilder.getNumCands(), + knnSearchBuilder.getSimilarity() + ); + knnRetriever.retrieverName(knnSearchBuilder.queryName()); + retrieverSources.add(new CompoundRetrieverBuilder.RetrieverSource(knnRetriever, null)); + } + return new RRFRetrieverBuilder(retrieverSources, rankWindowSize(), rankConstant()); + } + @Override protected boolean doEquals(RankBuilder other) { return Objects.equals(rankConstant, ((RRFRankBuilder) other).rankConstant); diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index a5c346b386999..320ba70bfdae5 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -1,7 
+1,11 @@ setup: - requires: - cluster_features: "gte_v8.8.0" - reason: 'rank added in 8.8' + capabilities: + - method: POST + path: /_search + capabilities: [ transform_rank_rrf_to_retriever ] + test_runner_features: capabilities + reason: "Support for transforming deprecated rank_rrf queries to the corresponding rrf retriever is required" - skip: features: "warnings" @@ -212,7 +216,7 @@ setup: "RRF rank should fail if size > rank_window_size": - do: - catch: "/\\[rank\\] requires \\[rank_window_size: 2\\] be greater than or equal to \\[size: 10\\]/" + catch: "/\\[rrf\\] requires \\[rank_window_size: 2\\] be greater than or equal to \\[size: 10\\]/" search: index: test body: @@ -284,3 +288,22 @@ setup: rank_window_size: 10 rank_constant: 0.3 size: 10 + +--- +"RRF rank should fail if we specify both rank and retriever": + - do: + catch: "/Cannot specify both \\[rank\\] and \\[retriever\\]./" + search: + index: test + body: + track_total_hits: true + fields: [ "text", "keyword" ] + retriever: + standard: + query: + match_all: {} + rank: + rrf: + rank_window_size: 10 + rank_constant: 10 + size: 10 diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml index 94a1457a7acc8..7af6db611cd8c 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml @@ -1,7 +1,16 @@ setup: + - skip: + features: + - close_to + - contains + - requires: - cluster_features: "gte_v8.15.0" - reason: 'pagination for rrf was added in 8.15' + capabilities: + - method: POST + path: /_search + capabilities: [ transform_rank_rrf_to_retriever ] + test_runner_features: capabilities + reason: "Support for transforming deprecated rank_rrf queries to the corresponding rrf retriever is 
required" - do: indices.create: @@ -629,14 +638,13 @@ setup: "Pagination within interleaved results, different result set sizes, rank_window_size covering all results": # perform multiple searches with different "from" parameter, ensuring that results are consistent # rank_window_size covers the entire result set for both queries, so pagination should be consistent - # queryA has a result set of [5, 1] and + # queryA has a result set of [1] and # queryB has a result set of [4, 3, 1, 2] # so for rank_constant=10, the expected order is [1, 5, 4, 3, 2] - requires: cluster_features: ["gte_v8.16.0"] reason: "deprecation added in 8.16" test_runner_features: warnings - - do: warnings: - "Deprecated field [rank] used, replaced by [retriever]" @@ -647,30 +655,15 @@ setup: track_total_hits: true sub_searches: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] "query": { - bool: { - should: [ - { - term: { - number_val: { - value: "5", - boost: 10.0 - } - } - }, - { - term: { - number_val: { - value: "1", - boost: 9.0 - } - } - } - ] + term: { + number_val: { + value: "1", + boost: 9.0 + } } } - }, { # this should clause would generate the result set [4, 3, 1, 2] @@ -722,10 +715,14 @@ setup: from : 0 size : 2 - - match: { hits.total.value : 5 } + - match: { hits.total.value : 4 } - length: { hits.hits : 2 } - match: { hits.hits.0._id: "1" } - - match: { hits.hits.1._id: "5" } + # score for doc 1 is (1/12 + 1/13) + - close_to: {hits.hits.0._score: {value: 0.1678, error: 0.001}} + - match: { hits.hits.1._id: "4" } + # score for doc 4 is (1/11) + - close_to: {hits.hits.1._score: {value: 0.0909, error: 0.001}} - do: warnings: @@ -737,30 +734,15 @@ setup: track_total_hits: true sub_searches: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] "query": { - bool: { - should: [ - { - term: { - number_val: { - value: "5", - boost: 10.0 - } - } - }, 
- { - term: { - number_val: { - value: "1", - boost: 9.0 - } - } - } - ] + term: { + number_val: { + value: "1", + boost: 9.0 + } } } - }, { # this should clause would generate the result set [4, 3, 1, 2] @@ -812,10 +794,14 @@ setup: from : 2 size : 2 - - match: { hits.total.value : 5 } + - match: { hits.total.value : 4 } - length: { hits.hits : 2 } - - match: { hits.hits.0._id: "4" } - - match: { hits.hits.1._id: "3" } + - match: { hits.hits.0._id: "3" } + # score for doc 3 is (1/12) + - close_to: {hits.hits.0._score: {value: 0.0833, error: 0.001}} + - match: { hits.hits.1._id: "2" } + # score for doc 2 is (1/14) + - close_to: {hits.hits.1._score: {value: 0.0714, error: 0.001}} - do: warnings: @@ -827,30 +813,15 @@ setup: track_total_hits: true sub_searches: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] "query": { - bool: { - should: [ - { - term: { - number_val: { - value: "5", - boost: 10.0 - } - } - }, - { - term: { - number_val: { - value: "1", - boost: 9.0 - } - } - } - ] + term: { + number_val: { + value: "1", + boost: 9.0 + } } } - }, { # this should clause would generate the result set [4, 3, 1, 2] @@ -892,7 +863,6 @@ setup: ] } } - } ] rank: @@ -902,10 +872,8 @@ setup: from: 4 size: 2 - - match: { hits.total.value: 5 } - - length: { hits.hits: 1 } - - match: { hits.hits.0._id: "2" } - + - match: { hits.total.value: 4 } + - length: { hits.hits: 0 } --- "Pagination within interleaved results, different result set sizes, rank_window_size not covering all results": @@ -1008,8 +976,13 @@ setup: - match: { hits.total.value : 5 } - length: { hits.hits : 2 } - - match: { hits.hits.0._id: "5" } - - match: { hits.hits.1._id: "4" } + + - contains: { hits.hits: { _id: "4" } } + - contains: { hits.hits: { _id: "5" } } + + # both docs have the same score (1/11) + - close_to: {hits.hits.0._score: {value: 0.0909, error: 0.001}} + - close_to: {hits.hits.1._score: {value: 0.0909, error: 0.001}} - 
do: warnings: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index 36e70581f39f2..1d74d7d67619a 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -1,8 +1,15 @@ setup: + - skip: + features: + - close_to + - requires: - cluster_features: "gte_v8.8.0" - reason: 'rank added in 8.8' - test_runner_features: "close_to" + capabilities: + - method: POST + path: /_search + capabilities: [ transform_rank_rrf_to_retriever ] + test_runner_features: capabilities + reason: "Support for transforming deprecated rank_rrf queries to the corresponding rrf retriever is required" - do: indices.create: @@ -198,12 +205,12 @@ setup: rank_constant: 1 size: 1 - - match: { hits.total.value: 6 } + - match: { hits.total.value: 5 } - match: { hits.hits.0._id: "5" } - - close_to: { aggregations.sums.value.asc_total: { value: 33.0, error: 0.001 }} - - close_to: { aggregations.sums.value.desc_total: { value: 39.0, error: 0.001 }} + - close_to: { aggregations.sums.value.asc_total: { value: 25.0, error: 0.001 }} + - close_to: { aggregations.sums.value.desc_total: { value: 35.0, error: 0.001 }} --- diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml index 1b74ffee62a11..99553a168c447 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml @@ -113,7 +113,7 @@ setup: - match: {hits.hits.0._explanation.details.0.details.0.description: 
"/weight\\(text:term.*/" } - match: {hits.hits.0._explanation.details.1.value: 1} - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" } - - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/found.vector.with.calculated.similarity.*/" } - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } @@ -122,7 +122,7 @@ setup: - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } - match: {hits.hits.1._explanation.details.1.value: 2} - match: {hits.hits.1._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } - - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.details.0.description: "/found.vector.with.calculated.similarity.*/" } - match: {hits.hits.2._explanation.value: 0.5} - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } @@ -250,7 +250,7 @@ setup: - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } - match: {hits.hits.0._explanation.details.1.value: 1} - match: {hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } - - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/found.vector.with.calculated.similarity.*/" } - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } @@ -259,7 +259,7 @@ setup: - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } - match: {hits.hits.1._explanation.details.1.value: 2} - match: 
{hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } - - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.details.0.description: "/found.vector.with.calculated.similarity.*/" } - match: {hits.hits.2._explanation.value: 0.5} - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } @@ -396,6 +396,7 @@ setup: - match: { hits.hits.1._id: "2" } - match: { hits.hits.2._id: "4" } + # this has now been translated to a retriever - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } - match: {hits.hits.0._explanation.details.0.value: 2} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml index a9ddb4f902929..24259e3aa2a85 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml @@ -210,14 +210,12 @@ setup: - match: { hits.hits.1._id: "2" } - match: { hits.hits.2._id: "4" } - - exists: profile.shards.0.dfs - - length: { profile.shards.0.dfs.knn: 1 } - - length: { profile.shards.0.dfs.knn.0.query: 1 } - - match: { profile.shards.0.dfs.knn.0.query.0.type: DocAndScoreQuery } - - - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } - - length: { profile.shards.0.searches.0.query.0.children: 1 } - - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } - - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } - - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } - - match: { 
profile.shards.0.searches.0.query.0.children.0.children.1.type: KnnScoreDocQuery } + - not_exists: profile.shards.0.dfs + + - match: { profile.shards.0.searches.0.query.0.type: RankDocsQuery } + - length: { profile.shards.0.searches.0.query.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: TopQuery } + - match: { profile.shards.0.searches.0.query.0.children.1.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.1.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.1.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.1.children.1.type: TopQuery } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 12ef800a7aae7..f3222a74b530c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -652,7 +652,10 @@ public void prepareIndexIfNeededThenExecute(final Consumer consumer, ); if (descriptorForVersion == null) { - final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage("create index"); + final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage( + "create index", + state.minClusterMappingVersion + ); consumer.accept(new IllegalStateException(error)); } else { logger.info( @@ -703,7 +706,10 @@ public void onFailure(Exception e) { ); if (descriptorForVersion == null) { - final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage("updating mapping"); + final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage( + "updating mapping", + state.minClusterMappingVersion + ); consumer.accept(new IllegalStateException(error)); } else { logger.info( diff 
--git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java index 0f895a2db17e0..7b4c290a2f06f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java @@ -10,6 +10,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.TimeValue; @@ -20,10 +22,14 @@ import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction; import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; +import java.util.Arrays; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.Executor; +import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + public class SecurityMigrationExecutor extends PersistentTasksExecutor { private static final Logger logger = LogManager.getLogger(SecurityMigrationExecutor.class); @@ -55,20 +61,29 @@ protected void nodeOperation(AllocatedPersistentTask task, SecurityMigrationTask updateMigrationVersion( params.getMigrationVersion(), securityIndexManager.getConcreteIndexName(), - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((l, response) -> { logger.info("Security migration not needed. 
Setting current version to: [" + params.getMigrationVersion() + "]"); - listener.onResponse(response); - }, listener::onFailure) + l.onResponse(response); + }) ); return; } - applyOutstandingMigrations(task, params.getMigrationVersion(), listener); + refreshSecurityIndex( + new ThreadedActionListener<>( + this.getExecutor(), + listener.delegateFailureIgnoreResponseAndWrap(l -> applyOutstandingMigrations(task, params.getMigrationVersion(), l)) + ) + ); } - private void applyOutstandingMigrations(AllocatedPersistentTask task, int currentMigrationVersion, ActionListener listener) { + private void applyOutstandingMigrations( + AllocatedPersistentTask task, + int currentMigrationVersion, + ActionListener migrationsListener + ) { if (task.isCancelled()) { - listener.onFailure(new TaskCancelledException("Security migration task cancelled")); + migrationsListener.onFailure(new TaskCancelledException("Security migration task cancelled")); return; } Map.Entry migrationEntry = migrationByVersion.higherEntry(currentMigrationVersion); @@ -79,34 +94,56 @@ private void applyOutstandingMigrations(AllocatedPersistentTask task, int curren .migrate( securityIndexManager, client, - ActionListener.wrap( - response -> updateMigrationVersion( + migrationsListener.delegateFailureIgnoreResponseAndWrap( + updateVersionListener -> updateMigrationVersion( migrationEntry.getKey(), securityIndexManager.getConcreteIndexName(), new ThreadedActionListener<>( this.getExecutor(), - ActionListener.wrap( - updateResponse -> applyOutstandingMigrations(task, migrationEntry.getKey(), listener), - listener::onFailure - ) + updateVersionListener.delegateFailureIgnoreResponseAndWrap(refreshListener -> { + refreshSecurityIndex( + new ThreadedActionListener<>( + this.getExecutor(), + refreshListener.delegateFailureIgnoreResponseAndWrap( + l -> applyOutstandingMigrations(task, migrationEntry.getKey(), l) + ) + ) + ); + }) ) - ), - listener::onFailure + ) ) ); } else { logger.info("Security migrations applied 
until version: [" + currentMigrationVersion + "]"); - listener.onResponse(null); + migrationsListener.onResponse(null); } } + /** + * Refresh security index to make sure that docs that were migrated are visible to the next migration and to prevent version conflicts + * or unexpected behaviour by APIs relying on migrated docs. + */ + private void refreshSecurityIndex(ActionListener listener) { + RefreshRequest refreshRequest = new RefreshRequest(securityIndexManager.getConcreteIndexName()); + executeAsyncWithOrigin(client, SECURITY_ORIGIN, RefreshAction.INSTANCE, refreshRequest, ActionListener.wrap(response -> { + if (response.getFailedShards() != 0) { + // Log a warning but do not stop migration, since this is not a critical operation + logger.warn("Failed to refresh security index during security migration {}", Arrays.toString(response.getShardFailures())); + } + listener.onResponse(null); + }, exception -> { + // Log a warning but do not stop migration, since this is not a critical operation + logger.warn("Failed to refresh security index during security migration", exception); + listener.onResponse(null); + })); + } + private void updateMigrationVersion(int migrationVersion, String indexName, ActionListener listener) { client.execute( UpdateIndexMigrationVersionAction.INSTANCE, new UpdateIndexMigrationVersionAction.Request(TimeValue.MAX_VALUE, migrationVersion, indexName), - ActionListener.wrap((response) -> { - listener.onResponse(null); - }, listener::onFailure) + listener.delegateFailureIgnoreResponseAndWrap(l -> l.onResponse(null)) ); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 0b98a595a6ab9..15d900e9d06ae 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -82,6 +82,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; @@ -434,7 +435,7 @@ public void testCannotUpdateIndexMappingsWhenMinMappingVersionTooLow() { final AtomicReference prepareException = new AtomicReference<>(null); // Hard-code a failure here. - doReturn("Nope").when(descriptorSpy).getMinimumMappingsVersionMessage(anyString()); + doReturn("Nope").when(descriptorSpy).getMinimumMappingsVersionMessage(anyString(), any()); doReturn(null).when(descriptorSpy) .getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0))); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java index 0f63e5302a5f1..35fb7726ec813 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -20,10 +22,12 @@ 
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction; import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionResponse; import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; import org.junit.Before; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; @@ -40,8 +44,11 @@ public class SecurityMigrationExecutorTests extends ESTestCase { private SecurityIndexManager securityIndexManager; private int updateIndexMigrationVersionActionInvocations; + private int refreshActionInvocations; - private boolean clientShouldThrowException = false; + private boolean updateVersionShouldThrowException = false; + + private boolean refreshIndexShouldThrowException = false; private AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); @@ -51,6 +58,7 @@ public void setUpMocks() { when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); updateIndexMigrationVersionActionInvocations = 0; + refreshActionInvocations = 0; client = new NoOpClient(threadPool) { @Override @SuppressWarnings("unchecked") @@ -59,12 +67,27 @@ protected void Request request, ActionListener listener ) { - if (clientShouldThrowException) { - listener.onFailure(new IllegalStateException("Bad client")); - return; + if (request instanceof RefreshRequest) { + if (refreshIndexShouldThrowException) { + if (randomBoolean()) { + listener.onFailure(new IllegalStateException("Refresh index failed")); + } else { + listener.onResponse((Response) new BroadcastResponse(1, 0, 1, List.of())); + } + } else { + refreshActionInvocations++; + listener.onResponse((Response) new BroadcastResponse(1, 1, 0, List.of())); + } + } else if (request instanceof 
UpdateIndexMigrationVersionAction.Request) { + if (updateVersionShouldThrowException) { + listener.onFailure(new IllegalStateException("Update version failed")); + } else { + updateIndexMigrationVersionActionInvocations++; + listener.onResponse((Response) new UpdateIndexMigrationVersionResponse()); + } + } else { + fail("Unexpected client request"); } - updateIndexMigrationVersionActionInvocations++; - listener.onResponse((Response) new UpdateIndexMigrationVersionResponse()); } }; @@ -85,6 +108,7 @@ public void testSuccessfulMigration() { verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(2, updateIndexMigrationVersionActionInvocations); + assertEquals(3, refreshActionInvocations); assertEquals(2, migrateInvocations[0]); } @@ -111,6 +135,7 @@ public void testNoMigrationMeetsRequirements() { verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(0, updateIndexMigrationVersionActionInvocations); + assertEquals(1, refreshActionInvocations); assertEquals(0, migrateInvocationsCounter[0]); } @@ -140,6 +165,7 @@ public void testPartialMigration() { securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); + assertEquals(3, refreshActionInvocations); assertEquals(2, updateIndexMigrationVersionActionInvocations); assertEquals(2, migrateInvocations[0]); } @@ -158,6 +184,7 @@ public void testNoMigrationNeeded() { verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(0, updateIndexMigrationVersionActionInvocations); + assertEquals(1, refreshActionInvocations); assertEquals(0, migrateInvocations[0]); } @@ -186,14 +213,13 @@ public int minMappingVersion() { })) ); - assertThrows( - IllegalStateException.class, - () -> securityMigrationExecutor.nodeOperation( + 
securityMigrationExecutor.nodeOperation( mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class) - ) - ); + ); + verify(mockTask, times(1)).markAsFailed(any()); + verify(mockTask, times(0)).markAsCompleted(); } public void testUpdateMigrationVersionThrowsException() { @@ -205,12 +231,27 @@ public void testUpdateMigrationVersionThrowsException() { client, new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) ); - clientShouldThrowException = true; + updateVersionShouldThrowException = true; securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsFailed(any()); verify(mockTask, times(0)).markAsCompleted(); } + public void testRefreshSecurityIndexThrowsException() { + final int[] migrateInvocations = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) + ); + refreshIndexShouldThrowException = true; + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); + verify(mockTask, times(0)).markAsFailed(any()); + verify(mockTask, times(1)).markAsCompleted(); + } + private SecurityMigrations.SecurityMigration generateMigration(int[] migrateInvocationsCounter, boolean isEligible) { SecurityMigrations.SecurityMigration migration = new SecurityMigrations.SecurityMigration() { @Override diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/S3RepositoryAnalysisRestIT.java 
b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/S3RepositoryAnalysisRestIT.java index 8986cf1059191..c0f2b40f5a10f 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/S3RepositoryAnalysisRestIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/S3RepositoryAnalysisRestIT.java @@ -31,13 +31,6 @@ public class S3RepositoryAnalysisRestIT extends AbstractRepositoryAnalysisRestTe .setting("s3.client.repo_test_kit.protocol", () -> "http", (n) -> USE_FIXTURE) .setting("s3.client.repo_test_kit.endpoint", s3Fixture::getAddress, (n) -> USE_FIXTURE) .setting("xpack.security.enabled", "false") - // Additional tracing related to investigation into https://github.com/elastic/elasticsearch/issues/102294 - .setting("logger.org.elasticsearch.repositories.s3", "TRACE") - .setting("logger.org.elasticsearch.repositories.blobstore.testkit", "TRACE") - .setting("logger.com.amazonaws.request", "DEBUG") - .setting("logger.org.apache.http.wire", "DEBUG") - // Necessary to permit setting the above two restricted loggers to DEBUG - .jvmArg("-Des.insecure_network_trace_enabled=true") .build(); @ClassRule diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroid.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroid.java index e009e07d35aa4..193aeb2950e1f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroid.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroid.java @@ -10,10 +10,7 @@ import org.elasticsearch.common.geo.SpatialPoint; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalCentroid; -import org.elasticsearch.search.aggregations.support.SamplingContext; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.common.CartesianPoint; import java.io.IOException; @@ -25,14 +22,14 @@ public class InternalCartesianCentroid extends InternalCentroid implements CartesianCentroid { public InternalCartesianCentroid(String name, SpatialPoint centroid, long count, Map metadata) { - super(name, centroid, count, metadata, new FieldExtractor("x", SpatialPoint::getX), new FieldExtractor("y", SpatialPoint::getY)); + super(name, centroid, count, metadata); } /** * Read from a stream. */ public InternalCartesianCentroid(StreamInput in) throws IOException { - super(in, new FieldExtractor("x", SpatialPoint::getX), new FieldExtractor("y", SpatialPoint::getY)); + super(in); } @Override @@ -76,12 +73,22 @@ protected InternalCartesianCentroid copyWith(double firstSum, double secondSum, } @Override - public InternalAggregation finalizeSampling(SamplingContext samplingContext) { - return new InternalCartesianCentroid(name, centroid, samplingContext.scaleUp(count), getMetadata()); + protected String nameFirst() { + return "x"; } - static class Fields { - static final ParseField CENTROID_X = new ParseField("x"); - static final ParseField CENTROID_Y = new ParseField("y"); + @Override + protected double extractFirst(SpatialPoint point) { + return point.getX(); + } + + @Override + protected String nameSecond() { + return "y"; + } + + @Override + protected double extractSecond(SpatialPoint point) { + return point.getY(); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml 
index 959581b18c11a..2cd1595d2d5b3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/180_match_operator.yml @@ -5,7 +5,7 @@ setup: - method: POST path: /_query parameters: [ method, path, parameters, capabilities ] - capabilities: [ match_operator ] + capabilities: [ match_operator_colon ] cluster_features: [ "gte_v8.16.0" ] reason: "Match operator added in 8.16.0" test_runner_features: [capabilities, allowed_warnings_regex] @@ -44,7 +44,7 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE content MATCH "fox" | KEEP id | SORT id' + query: 'FROM test | WHERE content:"fox" | KEEP id | SORT id' - match: { columns.0.name: "id" } - match: { columns.0.type: "integer" } @@ -59,7 +59,7 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE content MATCH "fox" AND id > 5 | KEEP id | SORT id' + query: 'FROM test | WHERE content:"fox" AND id > 5 | KEEP id | SORT id' - match: { columns.0.name: "id" } - match: { columns.0.type: "integer" } @@ -73,16 +73,13 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE content MATCH "fox" OR content MATCH "brown" | KEEP id | SORT id' + query: 'FROM test | WHERE content:"fox" AND content:"brown" | KEEP id | SORT id' - match: { columns.0.name: "id" } - match: { columns.0.type: "integer" } - - length: { values: 5 } + - length: { values: 2 } - match: { values.0.0: 1 } - - match: { values.1.0: 2 } - - match: { values.2.0: 3 } - - match: { values.3.0: 4 } - - match: { values.4.0: 6 } + - match: { values.1.0: 6 } --- "not where match": @@ -91,7 +88,7 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE NOT content MATCH "brown fox" | KEEP id | SORT id' + query: 'FROM test | WHERE NOT content:"brown 
fox" | KEEP id | SORT id' - match: { columns.0.name: "id" } - match: { columns.0.type: "integer" } @@ -106,7 +103,7 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE something match "fox"' + query: 'FROM test | WHERE something:"fox"' - match: { status: 400 } - match: { error.type: verification_exception } @@ -120,11 +117,11 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | EVAL upper_content = to_upper(content) | WHERE upper_content MATCH "FOX" | KEEP id' + query: 'FROM test | EVAL upper_content = to_upper(content) | WHERE upper_content:"FOX" | KEEP id' - match: { status: 400 } - match: { error.type: verification_exception } - - match: { error.reason: "Found 1 problem\nline 1:60: MATCH requires a mapped index field, found [upper_content]" } + - match: { error.reason: "Found 1 problem\nline 1:60: [:] operator cannot operate on [upper_content], which is not a field from an index mapping" } --- "match on overwritten column": @@ -134,11 +131,11 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | DROP content | EVAL content = CONCAT("ID: ", to_str(id)) | WHERE content match "fox"' + query: 'FROM test | DROP content | EVAL content = CONCAT("ID: ", to_str(id)) | WHERE content:"fox"' - match: { status: 400 } - match: { error.type: verification_exception } - - match: { error.reason: "Found 1 problem\nline 1:78: MATCH requires a mapped index field, found [content]" } + - match: { error.reason: "Found 1 problem\nline 1:78: [:] operator cannot operate on [content], which is not a field from an index mapping" } --- "match after stats": @@ -148,7 +145,7 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | STATS count(*) | WHERE content match "fox"' + query: 'FROM test | STATS count(*) | WHERE content:"fox"' - match: { status: 400 } - match: { error.type: 
verification_exception } @@ -162,11 +159,11 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE content MATCH "fox" OR to_upper(content) == "FOX"' + query: 'FROM test | WHERE content:"fox" OR to_upper(content) == "FOX"' - match: { status: 400 } - match: { error.type: verification_exception } - - match: { error.reason: "Found 1 problem\nline 1:19: Invalid condition using MATCH" } + - match: { error.reason: "Found 1 problem\nline 1:19: Invalid condition [content:\"fox\" OR to_upper(content) == \"FOX\"]. [:] operator can't be used as part of an or condition" } --- "match within eval": @@ -176,11 +173,11 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | EVAL matches_query = content MATCH "fox"' + query: 'FROM test | EVAL matches_query = content:"fox"' - match: { status: 400 } - match: { error.type: verification_exception } - - match: { error.reason: "Found 1 problem\nline 1:34: EVAL does not support MATCH expressions" } + - match: { error.reason: "Found 1 problem\nline 1:34: [:] operator is only supported in WHERE commands" } --- "match with non text field": @@ -190,8 +187,8 @@ setup: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM test | WHERE id MATCH "fox"' + query: 'FROM test | WHERE id:"fox"' - match: { status: 400 } - match: { error.type: verification_exception } - - match: { error.reason: "Found 1 problem\nline 1:19: MATCH requires a text or keyword field, but [id] has type [integer]" } + - match: { error.reason: "Found 1 problem\nline 1:19: first argument of [id:\"fox\"] must be [string], found value [id] type [integer]" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index b51bbdc4d2f87..bb3345f4118b9 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -30,7 +30,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [ snapshot_test_for_telemetry ] + capabilities: [ snapshot_test_for_telemetry, fn_bit_length ] reason: "Test that should only be executed on snapshot versions" - do: {xpack.usage: {}} @@ -91,7 +91,7 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 117} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 119} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": @@ -101,7 +101,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [ non_snapshot_test_for_telemetry ] + capabilities: [ non_snapshot_test_for_telemetry, fn_bit_length ] reason: "Test that should only be executed on release versions" - do: {xpack.usage: {}} @@ -162,4 +162,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 115} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 118} # check the "sister" test above for a likely update to the same esql.functions length check diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml index 63b9ba71510ed..731082378fe17 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_usage.yml @@ -1,5 +1,8 @@ --- logsdb usage: + - requires: + cluster_features: ["logsdb_telemetry_stats"] + reason: "requires stats" - do: indices.create: index: test1 @@ -7,20 +10,43 @@ logsdb usage: settings: index: mode: logsdb + - do: + bulk: + index: test1 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:30:00Z", "host.name": "foo" } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:31:00Z", "host.name": "bar" } - do: {xpack.usage: {}} - match: { logsdb.available: true } - match: { logsdb.indices_count: 1 } - match: { logsdb.indices_with_synthetic_source: 1 } + - match: { logsdb.num_docs: 2 } + - gt: { logsdb.size_in_bytes: 0} - do: indices.create: index: test2 + - do: + bulk: + index: test2 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foo" } + - { "index": { } } + - { "@timestamp": "2024-02-12T10:33:00Z", "host.name": "baz" } + - do: {xpack.usage: {}} - match: { logsdb.available: true } - match: { logsdb.indices_count: 1 } - match: { logsdb.indices_with_synthetic_source: 1 } + - match: { logsdb.num_docs: 2 } + - gt: { logsdb.size_in_bytes: 0} - do: indices.create: @@ -31,7 +57,17 @@ logsdb usage: mode: logsdb mapping.source.mode: stored + - do: + bulk: + index: test3 + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2024-02-12T10:32:00Z", "host.name": "foobar"} + - do: {xpack.usage: {}} - match: { logsdb.available: true } - match: { logsdb.indices_count: 2 } - match: { logsdb.indices_with_synthetic_source: 1 } + - match: { logsdb.num_docs: 3 } + - gt: { logsdb.size_in_bytes: 0} diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 38fbf99068a9b..271aadfe4b388 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -88,7 +88,14 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, 
baseName -> keystore 'xpack.watcher.encryption_key', file("${project.projectDir}/src/test/resources/system_key") setting 'xpack.watcher.encrypt_sensitive_data', 'true' - extraConfigFile 'operator/settings.json', file("${project.projectDir}/src/test/resources/operator_defined_role_mappings.json") + // file-based settings processing had a bug around applying role mappings on an unrecovered index + // this was fixed in 8.7.0 (https://github.com/elastic/elasticsearch/pull/92173). To avoid flakiness + // in the test, we only set a role mappings file for higher versions. + // TODO move this out into a separate test suite, since operator settings are not relevant for most BWC tests + // and have some side-effects + if (bwcVersion.onOrAfter('8.7.0')) { + extraConfigFile 'operator/settings.json', file("${project.projectDir}/src/test/resources/operator_defined_role_mappings.json") + } // Old versions of the code contain an invalid assertion that trips // during tests. Versions 5.6.9 and 6.2.4 have been fixed by removing diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java index 82d4050c044b1..915122c97d3f1 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRoleMappingCleanupIT.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.upgrades; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; @@ -24,8 +25,14 @@ import static org.hamcrest.Matchers.containsInAnyOrder; public class SecurityIndexRoleMappingCleanupIT extends AbstractUpgradeTestCase { + private static final Version UPGRADE_FROM_VERSION = 
Version.fromString(System.getProperty("tests.upgrade_from_version")); public void testCleanupDuplicateMappings() throws Exception { + // see build.gradle where we set operator/settings.json for more details on this skip + assumeTrue( + "Cluster requires version higher than since operator/settings.json is only set then: " + Version.V_8_7_0, + UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_7_0) + ); if (CLUSTER_TYPE == ClusterType.OLD) { // If we're in a state where the same operator-defined role mappings can exist both in cluster state and the native store // (V_8_15_0 transport added to security.role_mapping_cleanup feature added), create a state