diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 3283e691f121c..cb8062fef02b4 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 5e7c1a0960789..9992d940e3c97 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1137,6 +1137,22 @@ steps: env: BWC_VERSION: 7.17.19 + - label: "{{matrix.image}} / 7.17.20 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.20 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.20 + - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 @@ -1873,8 +1889,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.12.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.3 + - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 timeout_in_minutes: 300 matrix: setup: @@ -1887,10 +1903,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.12.3 + BWC_VERSION: 8.13.0 - - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 + - label: "{{matrix.image}} / 8.13.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.1 timeout_in_minutes: 300 matrix: setup: @@ -1903,7 +1919,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.0 + BWC_VERSION: 8.13.1 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 8e1ff14eda792..ff378477f7aa6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -692,6 +692,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.17.19 + - label: 7.17.20 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.20#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.20 - label: 8.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest timeout_in_minutes: 300 @@ -1152,8 +1162,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.12.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.3#bwcTest + - label: 8.13.0 / bwc + command: .ci/scripts/run-gradle.sh 
-Dbwc.checkout.align=true v8.13.0#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -1161,9 +1171,9 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.12.3 - - label: 8.13.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.0#bwcTest + BWC_VERSION: 8.13.0 + - label: 8.13.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.1#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -1171,7 +1181,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.0 + BWC_VERSION: 8.13.1 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -1246,7 +1256,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -1290,7 +1300,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/pull-request/part-1-arm.yml b/.buildkite/pipelines/pull-request/part-1-arm.yml new file mode 100644 index 0000000000000..5e94f90c67754 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-1-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-1-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart1 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-2-arm.yml b/.buildkite/pipelines/pull-request/part-2-arm.yml new file mode 100644 index 0000000000000..9a89ddf4a0eac --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-2-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-2-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart2 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-3-arm.yml b/.buildkite/pipelines/pull-request/part-3-arm.yml new file mode 100644 index 0000000000000..003300597af28 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-3-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-3-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart3 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-4-arm.yml b/.buildkite/pipelines/pull-request/part-4-arm.yml new file mode 100644 index 0000000000000..6462eba287bd6 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-4-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-4-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart4 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 
8b454fa92ab02..a655b5a862683 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -68,6 +68,7 @@ BWC_VERSION: - "7.17.17" - "7.17.18" - "7.17.19" + - "7.17.20" - "8.0.0" - "8.0.1" - "8.1.0" @@ -114,6 +115,6 @@ BWC_VERSION: - "8.12.0" - "8.12.1" - "8.12.2" - - "8.12.3" - "8.13.0" + - "8.13.1" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index d85a432684495..f31603772a7f7 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - - "7.17.19" - - "8.12.3" - - "8.13.0" + - "7.17.20" + - "8.13.1" - "8.14.0" diff --git a/.github/workflows/gradle-wrapper-validation.yml b/.github/workflows/gradle-wrapper-validation.yml index 39bef5e91f954..46762905e2c9a 100644 --- a/.github/workflows/gradle-wrapper-validation.yml +++ b/.github/workflows/gradle-wrapper-validation.yml @@ -10,4 +10,4 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@699bb18358f12c5b78b37bb0111d3a0e2276e0e2 # Release v2.1.1 diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 63686023498c9..d3aef8746b068 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -147,7 +147,7 @@ private static Operator operator(DriverContext driverContext, String grouping, S }; return new HashAggregationOperator( List.of(supplier(op, dataType, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), - () -> BlockHash.build(groups, driverContext, 16 * 1024, false), + () -> BlockHash.build(groups, driverContext.blockFactory(), 16 * 1024, false), driverContext ); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 1765897ba35e7..5b217efbe1ed1 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -8,6 +8,7 @@ package org.elasticsearch.benchmark.compute.operator; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; @@ -26,11 +27,13 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; @@ -58,7 +61,6 @@ @State(Scope.Thread) @Fork(1) public class EvalBenchmark { - private 
static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? private static final BlockFactory blockFactory = BlockFactory.getInstance( new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE @@ -82,7 +84,9 @@ public class EvalBenchmark { } } - @Param({ "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending" }) + @Param( + { "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending", "rlike" } + ) public String operation; private static Operator operator(String operation) { @@ -134,6 +138,11 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext); } + case "rlike" -> { + FieldAttribute keywordField = keywordField(); + RLike rlike = new RLike(Source.EMPTY, keywordField, new RLikePattern(".ar")); + yield EvalMapper.toEvaluator(rlike, layout(keywordField)).get(driverContext); + } default -> throw new UnsupportedOperationException(); }; } @@ -146,6 +155,10 @@ private static FieldAttribute intField() { return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataTypes.INTEGER, Map.of(), true)); } + private static FieldAttribute keywordField() { + return new FieldAttribute(Source.EMPTY, "keyword", new EsField("keyword", DataTypes.KEYWORD, Map.of(), true)); + } + private static Layout layout(FieldAttribute... fields) { Layout.Builder layout = new Layout.Builder(); layout.append(Arrays.asList(fields)); @@ -205,6 +218,15 @@ private static void checkExpected(String operation, Page actual) { } } } + case "rlike" -> { + BooleanVector v = actual.getBlock(1).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + boolean expected = i % 2 == 1; + if (v.getBoolean(i) != expected) { + throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + v.getBoolean(i) + "]"); + } + } + } default -> throw new UnsupportedOperationException(); } } @@ -250,6 +272,14 @@ private static Page page(String operation) { } yield new Page(builder.build()); } + case "rlike" -> { + var builder = blockFactory.newBytesRefVectorBuilder(BLOCK_LENGTH); + BytesRef[] values = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + for (int i = 0; i < BLOCK_LENGTH; i++) { + builder.appendBytesRef(values[i % 2]); + } + yield new Page(builder.build().asBlock()); + } default -> throw new UnsupportedOperationException(); }; } diff --git a/branches.json b/branches.json index dc72956c13f80..772693505b9e0 100644 --- a/branches.json +++ b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.13" }, - { - "branch": "8.12" - }, { "branch": "7.17" } diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 24647c366c459..52e72d973f2ed 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -179,6 +179,10 @@ gradlePlugin { id = 'elasticsearch.test.fixtures' implementationClass = 'org.elasticsearch.gradle.internal.testfixtures.TestFixturesPlugin' } + deployTestFixtures { + id = 'elasticsearch.deploy-test-fixtures' + implementationClass = 'org.elasticsearch.gradle.internal.testfixtures.TestFixturesDeployPlugin' + } testBase { id = 'elasticsearch.test-base' implementationClass = 'org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin' diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties 
b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index e0588ed440c57..d342ebc435197 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -28,7 +28,12 @@ import org.gradle.api.tasks.Input; import org.jetbrains.annotations.NotNull; -import java.io.*; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -210,12 +215,15 @@ private static void createBuildArchiveTar(List files, File projectDir, Fil throw new IOException("Support only file!"); } + long entrySize = Files.size(path); TarArchiveEntry tarEntry = new TarArchiveEntry(path.toFile(), calculateArchivePath(path, projectPath)); - tarEntry.setSize(Files.size(path)); + tarEntry.setSize(entrySize); tOut.putArchiveEntry(tarEntry); // copy file to TarArchiveOutputStream - Files.copy(path, tOut); + try (BufferedInputStream bin = new BufferedInputStream(Files.newInputStream(path))) { + IOUtils.copyLarge(bin, tOut, 0, entrySize); + } tOut.closeArchiveEntry(); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index a9b332c3cfd3c..9e2f44323f914 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -40,6 +41,7 @@ import javax.inject.Inject; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; import static org.objectweb.asm.Opcodes.V_PREVIEW; public class MrjarPlugin implements Plugin { @@ -58,48 +60,30 @@ public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - var srcDir = project.getProjectDir().toPath().resolve("src"); - List mainVersions = new ArrayList<>(); - try (var subdirStream = Files.list(srcDir)) { - for (Path sourceset : subdirStream.toList()) { - 
assert Files.isDirectory(sourceset); - String sourcesetName = sourceset.getFileName().toString(); - Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); - if (sourcesetMatcher.matches()) { - mainVersions.add(Integer.parseInt(sourcesetMatcher.group(1))); - } - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - - Collections.sort(mainVersions); - List parentSourceSets = new ArrayList<>(); - parentSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List mainVersions = findSourceVersions(project); + List mainSourceSets = new ArrayList<>(); + mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List testSourceSets = new ArrayList<>(mainSourceSets); + testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); for (int javaVersion : mainVersions) { - String sourcesetName = "main" + javaVersion; - addMrjarSourceset(project, javaExtension, sourcesetName, parentSourceSets, javaVersion); - parentSourceSets.add(sourcesetName); + String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + configureSourceSetInJar(project, mainSourceSet, javaVersion); + mainSourceSets.add(mainSourceSetName); + testSourceSets.add(mainSourceSetName); + + String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + testSourceSets.add(testSourceSetName); + createTestTask(project, testSourceSet, javaVersion, mainSourceSets); } - } - private void addMrjarSourceset( - Project project, - JavaPluginExtension javaExtension, - String sourcesetName, - List parentSourceSets, - int javaVersion - ) { - SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourcesetName); - for (String parentSourceSetName : parentSourceSets) { - GradleUtils.extendSourceSet(project, parentSourceSetName, sourcesetName); - } + configureMrjar(project); + } + private void configureMrjar(Project project) { var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); - jarTask.configure(task -> { - task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput())); - task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); - }); + jarTask.configure(task -> { task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); }); project.getTasks().withType(Test.class).named(JavaPlugin.TEST_TASK_NAME).configure(testTask -> { testTask.dependsOn(jarTask); @@ -109,6 +93,19 @@ private void addMrjarSourceset( FileCollection testRuntime = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath(); testTask.setClasspath(testRuntime.minus(mainRuntime).plus(project.files(jarTask))); }); + } + + private SourceSet addSourceSet( + Project project, + JavaPluginExtension javaExtension, + String sourceSetName, + List parentSourceSets, + int javaVersion + ) { + SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourceSetName); + for (String parentSourceSetName : parentSourceSets) { + GradleUtils.extendSourceSet(project, parentSourceSetName, sourceSetName); + } project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> { compileTask.getJavaCompiler() @@ -121,6 +118,64 @@ private void addMrjarSourceset( compileTask.doLast(t -> { 
stripPreviewFromFiles(compileTask.getDestinationDirectory().getAsFile().get().toPath()); }); }); + + // Since we configure MRJAR sourcesets to allow preview apis, class signatures for those + // apis are not known by forbidden apis, so we must ignore all missing classes. We could, in theory, + // run forbidden apis in a separate jvm matching the sourceset jvm, but it's not worth + // the complexity (according to forbidden apis author!) + String forbiddenApisTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); + project.getTasks().withType(CheckForbiddenApisTask.class).named(forbiddenApisTaskName).configure(forbiddenApisTask -> { + forbiddenApisTask.setIgnoreMissingClasses(true); + }); + + return sourceSet; + } + + private void configureSourceSetInJar(Project project, SourceSet sourceSet, int javaVersion) { + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); + jarTask.configure(task -> task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput()))); + } + + private void createTestTask(Project project, SourceSet sourceSet, int javaVersion, List mainSourceSets) { + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); + var testTaskProvider = project.getTasks().register(JavaPlugin.TEST_TASK_NAME + javaVersion, Test.class); + testTaskProvider.configure(testTask -> { + testTask.dependsOn(jarTask); + + SourceSetContainer sourceSets = GradleUtils.getJavaSourceSets(project); + FileCollection testRuntime = sourceSet.getRuntimeClasspath(); + for (String mainSourceSetName : mainSourceSets) { + FileCollection mainRuntime = sourceSets.getByName(mainSourceSetName).getOutput(); + testRuntime = testRuntime.minus(mainRuntime); + } + testTask.setClasspath(testRuntime.plus(project.files(jarTask))); + testTask.setTestClassesDirs(sourceSet.getOutput().getClassesDirs()); + + testTask.getJavaLauncher() + .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + }); + + project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); + } + + private static List findSourceVersions(Project project) { + var srcDir = project.getProjectDir().toPath().resolve("src"); + List versions = new ArrayList<>(); + try (var subdirStream = Files.list(srcDir)) { + for (Path sourceSetPath : subdirStream.toList()) { + assert Files.isDirectory(sourceSetPath); + String sourcesetName = sourceSetPath.getFileName().toString(); + Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); + if (sourcesetMatcher.matches()) { + versions.add(Integer.parseInt(sourcesetMatcher.group(1))); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + Collections.sort(versions); + return versions; } private static void stripPreviewFromFiles(Path compileDir) { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java new file mode 100644 index 0000000000000..b365142282785 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.shadow; + +import com.github.jengelman.gradle.plugins.shadow.ShadowStats; +import com.github.jengelman.gradle.plugins.shadow.relocation.RelocateClassContext; +import com.github.jengelman.gradle.plugins.shadow.relocation.Relocator; +import com.github.jengelman.gradle.plugins.shadow.transformers.Transformer; +import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext; + +import org.apache.commons.io.IOUtils; +import org.apache.tools.zip.ZipEntry; +import org.apache.tools.zip.ZipOutputStream; +import org.gradle.api.file.FileTreeElement; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import java.io.BufferedInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.List; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; + +public class XmlClassRelocationTransformer implements Transformer { + + boolean hasTransformedResource = false; + + private Document doc; + + private String resource; + + @Override + public boolean canTransformResource(FileTreeElement element) { + String path = element.getRelativePath().getPathString(); + if (resource != null && resource.equals(path)) { + return true; + } + return false; + } + + @Override + public void transform(TransformerContext context) { + try { + BufferedInputStream bis = new BufferedInputStream(context.getIs()); + DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + doc = dBuilder.parse(bis); + doc.getDocumentElement().normalize(); + Node root = doc.getDocumentElement(); + walkThroughNodes(root, context); + if (hasTransformedResource == false) { + this.doc = null; + } + } catch (Exception e) { + throw new RuntimeException("Error parsing xml file in " + context.getIs(), e); + } + } + + private static String getRelocatedClass(String className, TransformerContext context) { + List relocators = context.getRelocators(); + ShadowStats stats = context.getStats(); + if (className != null && className.length() > 0 && relocators != null) { + for (Relocator relocator : relocators) { + if (relocator.canRelocateClass(className)) { + RelocateClassContext relocateClassContext = new RelocateClassContext(className, stats); + return relocator.relocateClass(relocateClassContext); + } + } + } + + return className; + } + + private void walkThroughNodes(Node node, TransformerContext context) { + if (node.getNodeType() == Node.TEXT_NODE) { + String nodeValue = node.getNodeValue(); + if (nodeValue.isBlank() == false) { + String relocatedClass = getRelocatedClass(nodeValue, context); + if (relocatedClass.equals(nodeValue) == false) { + node.setNodeValue(relocatedClass); + hasTransformedResource = true; + } + } + } + NodeList nodeList = node.getChildNodes(); + for (int i = 0; i < nodeList.getLength(); i++) { + Node currentNode = nodeList.item(i); + walkThroughNodes(currentNode, context); + } + } + + @Override + public boolean hasTransformedResource() { + return hasTransformedResource; + } + + 
@Override + public void modifyOutputStream(ZipOutputStream os, boolean preserveFileTimestamps) { + ZipEntry entry = new ZipEntry(resource); + entry.setTime(TransformerContext.getEntryTimestamp(preserveFileTimestamps, entry.getTime())); + + try { + // Write the content back to the XML file + TransformerFactory transformerFactory = TransformerFactory.newInstance(); + DOMSource source = new DOMSource(doc); + + // Result stream will be a ByteArrayOutputStream + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + StreamResult result = new StreamResult(baos); + // Do the transformation and serialization + transformerFactory.newTransformer().transform(source, result); + os.putNextEntry(entry); + IOUtils.write(baos.toByteArray(), os); + os.closeEntry(); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (TransformerException e) { + throw new RuntimeException(e); + } finally { + hasTransformedResource = false; + doc = null; + } + } + + @Override + public String getName() { + return getClass().getSimpleName(); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java index f3950c8646292..1787ebcccf3a9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java @@ -8,7 +8,7 @@ package org.elasticsearch.gradle.internal.test.rest; -import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask; +import org.elasticsearch.gradle.internal.test.RestIntegTestTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -40,13 +40,7 @@ public void apply(Project project) { } // setup the javaRestTest task - // we use a StandloneRestIntegTestTask here so that the conventions of RestTestBasePlugin don't create a test cluster - TaskProvider testTask = registerTestTask( - project, - javaTestSourceSet, - SOURCE_SET_NAME, - StandaloneRestIntegTestTask.class - ); + TaskProvider testTask = registerTestTask(project, javaTestSourceSet, SOURCE_SET_NAME, RestIntegTestTask.class); project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(testTask)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java index 66d3507f7f9b3..ba40998e2b02a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java @@ -8,7 +8,7 @@ package org.elasticsearch.gradle.internal.test.rest; -import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask; +import org.elasticsearch.gradle.internal.test.RestIntegTestTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -36,12 +36,7 @@ public void apply(Project project) { SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet yamlTestSourceSet = sourceSets.create(SOURCE_SET_NAME); - TaskProvider testTask = registerTestTask( - project, - 
yamlTestSourceSet, - SOURCE_SET_NAME, - StandaloneRestIntegTestTask.class - ); + TaskProvider testTask = registerTestTask(project, yamlTestSourceSet, SOURCE_SET_NAME, RestIntegTestTask.class); project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(testTask)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java new file mode 100644 index 0000000000000..ee6f70db2b788 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.testfixtures; + +import org.gradle.api.Named; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; + +import java.io.File; + +public abstract class TestFixtureDeployment implements Named { + + private final String name; + + public TestFixtureDeployment(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + public abstract Property getDockerRegistry(); + + public abstract Property getDockerContext(); + + public abstract Property getVersion(); + + public abstract ListProperty getBaseImages(); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java new file mode 100644 index 0000000000000..362ced9c3234e --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.testfixtures; + +import org.apache.commons.lang.StringUtils; +import org.elasticsearch.gradle.Architecture; +import org.elasticsearch.gradle.internal.docker.DockerBuildTask; +import org.elasticsearch.gradle.internal.info.BuildParams; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; + +import java.util.Arrays; +import java.util.List; + +public class TestFixturesDeployPlugin implements Plugin { + + public static final String DEPLOY_FIXTURE_TASK_NAME = "deployFixtureDockerImages"; + private static String DEFAULT_DOCKER_REGISTRY = "docker.elastic.co/elasticsearch-dev"; + + @Override + public void apply(Project project) { + NamedDomainObjectContainer fixtures = project.container(TestFixtureDeployment.class); + project.getExtensions().add("dockerFixtures", fixtures); + registerDeployTaskPerFixture(project, fixtures); + project.getTasks().register(DEPLOY_FIXTURE_TASK_NAME, task -> task.dependsOn(project.getTasks().withType(DockerBuildTask.class))); + } + + private static void registerDeployTaskPerFixture(Project project, NamedDomainObjectContainer fixtures) { + fixtures.all( + fixture -> project.getTasks() + .register("deploy" + StringUtils.capitalize(fixture.getName()) + "DockerImage", DockerBuildTask.class, task -> { + task.getDockerContext().fileValue(fixture.getDockerContext().get()); + List baseImages = fixture.getBaseImages().get(); + if (baseImages.isEmpty() == false) { + task.setBaseImages(baseImages.toArray(new String[baseImages.size()])); + } + task.setNoCache(BuildParams.isCi()); + task.setTags( + new String[] { + resolveTargetDockerRegistry(fixture) + "/" + fixture.getName() + "-fixture:" + fixture.getVersion().get() } + ); + task.getPush().set(BuildParams.isCi()); + task.getPlatforms().addAll(Arrays.stream(Architecture.values()).map(a -> a.dockerPlatform).toList()); + task.setGroup("Deploy TestFixtures"); + task.setDescription("Deploys the " + fixture.getName() + " test fixture"); + }) + ); + } + + private static String resolveTargetDockerRegistry(TestFixtureDeployment fixture) { + return fixture.getDockerRegistry().getOrElse(DEFAULT_DOCKER_REGISTRY); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index 0270ee22ca8c5..89a40711c9a19 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -11,7 +11,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.compress.utils.Lists; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainDownload; import org.gradle.jvm.toolchain.JavaToolchainRequest; @@ -21,17 +20,17 @@ import java.io.IOException; import java.net.URI; import java.net.URL; -import java.util.Comparator; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.StreamSupport; import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri; public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver { // package protected for better testing - final Map> 
CACHED_SEMVERS = new ConcurrentHashMap<>(); + final Map> CACHED_RELEASES = new ConcurrentHashMap<>(); @Override public Optional resolve(JavaToolchainRequest request) { @@ -39,7 +38,7 @@ public Optional resolve(JavaToolchainRequest request) { return Optional.empty(); } AdoptiumVersionRequest versionRequestKey = toVersionRequest(request); - Optional versionInfo = CACHED_SEMVERS.computeIfAbsent( + Optional versionInfo = CACHED_RELEASES.computeIfAbsent( versionRequestKey, (r) -> resolveAvailableVersion(versionRequestKey) ); @@ -54,12 +53,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) { return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion); } - private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { + private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { ObjectMapper mapper = new ObjectMapper(); try { int languageVersion = requestKey.languageVersion.asInt(); URL source = new URL( - "https://api.adoptium.net/v3/info/release_versions?architecture=" + "https://api.adoptium.net/v3/info/release_names?architecture=" + requestKey.arch + "&image_type=jdk&os=" + requestKey.platform @@ -71,14 +70,8 @@ private Optional resolveAvailableVersion(AdoptiumVersionReq + ")" ); JsonNode jsonNode = mapper.readTree(source); - JsonNode versionsNode = jsonNode.get("versions"); - return Optional.of( - Lists.newArrayList(versionsNode.iterator()) - .stream() - .map(this::toVersionInfo) - .max(Comparator.comparing(AdoptiumVersionInfo::semver)) - .get() - ); + JsonNode versionsNode = jsonNode.get("releases"); + return StreamSupport.stream(versionsNode.spliterator(), false).map(JsonNode::textValue).findFirst(); } catch (FileNotFoundException e) { // request combo not supported (e.g. aarch64 + windows return Optional.empty(); @@ -87,21 +80,10 @@ private Optional resolveAvailableVersion(AdoptiumVersionReq } } - private AdoptiumVersionInfo toVersionInfo(JsonNode node) { - return new AdoptiumVersionInfo( - node.get("build").asInt(), - node.get("major").asInt(), - node.get("minor").asInt(), - node.get("openjdk_version").asText(), - node.get("security").asInt(), - node.get("semver").asText() - ); - } - - private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { + private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) { return URI.create( - "https://api.adoptium.net/v3/binary/version/jdk-" - + versionInfo.semver + "https://api.adoptium.net/v3/binary/version/" + + version + "/" + request.platform + "/" @@ -118,7 +100,5 @@ private boolean requestIsSupported(JavaToolchainRequest request) { return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM); } - record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {} - record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {} } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 818cb040c172e..162895fd486cf 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -39,11 +39,7 @@ record 
JdkBuild(JavaLanguageVersion languageVersion, String version, String buil ); // package private so it can be replaced by tests - List builds = List.of( - getBundledJdkBuild(), - // 22 release candidate - new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059") - ); + List builds = List.of(getBundledJdkBuild()); private JdkBuild getBundledJdkBuild() { String bundledJdkVersion = VersionProperties.getBundledJdkVersion(); diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index f043ef362390f..631c6d36a93a4 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.6 \ No newline at end of file +8.7 \ No newline at end of file diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index 6383d577f027f..fe4a644ddfc1d 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,7 +11,6 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver -import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -38,12 +37,7 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), - 1, - 1, - "" + languageVersion.asInt() + ".1.1.1+37", - 0, "" + languageVersion.asInt() + ".1.1.1+37.1" - ))) + resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) } return resolver diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0883097e75aad..a2e8651810042 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,7 +2,7 @@ elasticsearch = 8.14.0 lucene = 9.10.0 bundled_jdk_vendor = openjdk -bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac +bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059 # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 9216b538bd313..29833e1f3bb07 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -213,10 +213,9 @@ public void beforeStart() { // if metrics were not enabled explicitly for gradlew run we should disable them else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics node.setting("telemetry.metrics.enabled", "false"); - } else if 
(node.getSettingKeys().contains("telemetry.tracing.enabled") == false - && node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing - node.setting("telemetry.tracing.enable", "false"); - } + } else if (node.getSettingKeys().contains("telemetry.tracing.enabled") == false) { // tracing + node.setting("telemetry.tracing.enable", "false"); + } } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java index 926d5727a1b4a..ec39c0fc89ac2 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java @@ -8,6 +8,12 @@ package org.elasticsearch.server.cli; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.node.NodeRoleSettings; + import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -31,7 +37,7 @@ private JvmErgonomics() { * @param userDefinedJvmOptions A list of JVM options that have been defined by the user. * @return A list of additional JVM options to set. */ - static List choose(final List userDefinedJvmOptions) throws InterruptedException, IOException { + static List choose(final List userDefinedJvmOptions, Settings nodeSettings) throws InterruptedException, IOException { final List ergonomicChoices = new ArrayList<>(); final Map finalJvmOptions = JvmOption.findFinalOptions(userDefinedJvmOptions); final long heapSize = JvmOption.extractMaxHeapSize(finalJvmOptions); @@ -55,6 +61,22 @@ static List choose(final List userDefinedJvmOptions) throws Inte ergonomicChoices.add("-XX:G1ReservePercent=" + tuneG1GCReservePercent); } + boolean isSearchTier = NodeRoleSettings.NODE_ROLES_SETTING.get(nodeSettings).contains(DiscoveryNodeRole.SEARCH_ROLE); + // override new percentage on small heaps on search tier to increase chance of staying free of the real memory circuit breaker limit + if (isSearchTier && heapSize < ByteSizeUnit.GB.toBytes(5)) { + ergonomicChoices.add("-XX:+UnlockExperimentalVMOptions"); + ergonomicChoices.add("-XX:G1NewSizePercent=10"); + } + + // for dedicated search, using just 1 conc gc thread is not always enough to keep us below real memory breaker limit + // jvm use (2+processsors) / 4 (for small processor counts), so only affects 4/5 processors (for now) + if (EsExecutors.NODE_PROCESSORS_SETTING.exists(nodeSettings)) { + int allocated = EsExecutors.allocatedProcessors(nodeSettings); + if (allocated >= 4 && allocated <= 5 && isSearchTier) { + ergonomicChoices.add("-XX:ConcGCThreads=2"); + } + } + return ergonomicChoices; } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index d312fae4456f1..35f3f62122f0c 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -139,7 +139,7 @@ private List jvmOptions( new OverridableSystemMemoryInfo(substitutedJvmOptions, new DefaultSystemMemoryInfo()) ); 
substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); - final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); + final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions, args.nodeSettings()); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(args.nodeSettings(), cliSysprops); final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), args.secrets(), args.logsDir(), tmpDir); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java index 0d4edfc384d46..0485697eb128c 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java @@ -8,7 +8,12 @@ package org.elasticsearch.server.cli; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressFileSystems; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.node.NodeRoleSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; @@ -26,6 +31,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; @@ -35,6 +41,7 @@ @WithoutSecurityManager @SuppressFileSystems("*") +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106554") public class JvmErgonomicsTests extends ESTestCase { public void testExtractValidHeapSizeUsingXmx() throws Exception { @@ -106,7 +113,7 @@ public void testExtractSystemProperties() { } public void testG1GOptionsForSmallHeap() throws Exception { - List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC")); + List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC"), Settings.EMPTY); assertThat(jvmErgonomics, hasItem("-XX:G1HeapRegionSize=4m")); assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30")); assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=15")); @@ -114,7 +121,8 @@ public void testG1GOptionsForSmallHeap() throws Exception { public void testG1GOptionsForSmallHeapWhenTuningSet() throws Exception { List jvmErgonomics = JvmErgonomics.choose( - Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC", "-XX:G1HeapRegionSize=4m", "-XX:InitiatingHeapOccupancyPercent=45") + Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC", "-XX:G1HeapRegionSize=4m", "-XX:InitiatingHeapOccupancyPercent=45"), + Settings.EMPTY ); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); @@ -122,14 +130,14 @@ public void testG1GOptionsForSmallHeapWhenTuningSet() throws Exception { } public void testG1GOptionsForLargeHeap() throws Exception { - List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC")); + List jvmErgonomics = 
JvmErgonomics.choose(Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC"), Settings.EMPTY); assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30")); assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=25")); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); } public void testG1GOptionsForSmallHeapWhenOtherGCSet() throws Exception { - List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseParallelGC")); + List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseParallelGC"), Settings.EMPTY); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent=")))); @@ -137,7 +145,8 @@ public void testG1GOptionsForSmallHeapWhenOtherGCSet() throws Exception { public void testG1GOptionsForLargeHeapWhenTuningSet() throws Exception { List jvmErgonomics = JvmErgonomics.choose( - Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC", "-XX:InitiatingHeapOccupancyPercent=60", "-XX:G1ReservePercent=10") + Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC", "-XX:InitiatingHeapOccupancyPercent=60", "-XX:G1ReservePercent=10"), + Settings.EMPTY ); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent=")))); @@ -168,18 +177,79 @@ public void testMaxDirectMemorySizeChoice() throws Exception { ); final String heapSize = randomFrom(heapMaxDirectMemorySize.keySet().toArray(String[]::new)); assertThat( - JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize)), + JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize), Settings.EMPTY), hasItem("-XX:MaxDirectMemorySize=" + heapMaxDirectMemorySize.get(heapSize)) ); } public void testMaxDirectMemorySizeChoiceWhenSet() throws Exception { assertThat( - JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g")), + JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g"), Settings.EMPTY), everyItem(not(startsWith("-XX:MaxDirectMemorySize="))) ); } + public void testConcGCThreadsNotSetBasedOnProcessors() throws Exception { + Settings.Builder nodeSettingsBuilder = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()); + if (randomBoolean()) { + nodeSettingsBuilder.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), randomBoolean() ? 
between(1, 3) : between(6, 100)); + } + assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); + } + + public void testConcGCThreadsNotSetBasedOnRoles() throws Exception { + Settings.Builder nodeSettingsBuilder = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5)); + if (randomBoolean()) { + nodeSettingsBuilder.put( + NodeRoleSettings.NODE_ROLES_SETTING.getKey(), + randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() + ); + } + assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); + + } + + public void testConcGCThreadsSet() throws Exception { + Settings nodeSettings = Settings.builder() + .put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5)) + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + assertThat(JvmErgonomics.choose(List.of(), nodeSettings), hasItem("-XX:ConcGCThreads=2")); + } + + public void testMinimumNewSizeNotSetBasedOnHeap() throws Exception { + Settings nodeSettings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(5, 31) + "g"), nodeSettings); + assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions")))); + assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent=")))); + } + + public void testMinimumNewSizeNotSetBasedOnRoles() throws Exception { + Settings nodeSettings = randomBoolean() + ? Settings.EMPTY + : Settings.builder() + .put( + NodeRoleSettings.NODE_ROLES_SETTING.getKey(), + randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() + ) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings); + assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions")))); + assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent=")))); + } + + public void testMinimumNewSizeSet() throws Exception { + Settings nodeSettings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings); + assertThat(chosen, hasItem("-XX:+UnlockExperimentalVMOptions")); + assertThat(chosen, hasItem("-XX:G1NewSizePercent=10")); + } + @SuppressWarnings("ConstantConditions") public void testMissingOptionHandling() { final Map g1GcOn = Map.of("UseG1GC", new JvmOption("true", "")); diff --git a/docs/changelog/100740.yaml b/docs/changelog/100740.yaml deleted file mode 100644 index c93fbf676ef81..0000000000000 --- a/docs/changelog/100740.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100740 -summary: "ESQL: Referencing expressions that contain backticks requires <>." 
-area: ES|QL -type: enhancement -issues: - - 100312 diff --git a/docs/changelog/100813.yaml b/docs/changelog/100813.yaml deleted file mode 100644 index 476098b62c106..0000000000000 --- a/docs/changelog/100813.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100813 -summary: Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return - `ParentTaskAssigningClient` -area: Infra/Transport API -type: enhancement -issues: [] diff --git a/docs/changelog/101209.yaml b/docs/changelog/101209.yaml deleted file mode 100644 index debec27e61307..0000000000000 --- a/docs/changelog/101209.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101209 -summary: "Making `k` and `num_candidates` optional for knn search" -area: Vector Search -type: enhancement -issues: - - 97533 diff --git a/docs/changelog/101487.yaml b/docs/changelog/101487.yaml deleted file mode 100644 index b4531f7fd6f75..0000000000000 --- a/docs/changelog/101487.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101487 -summary: Wait for async searches to finish when shutting down -area: Infra/Node Lifecycle -type: enhancement -issues: [] diff --git a/docs/changelog/101640.yaml b/docs/changelog/101640.yaml deleted file mode 100644 index 6f61a3a3ffd84..0000000000000 --- a/docs/changelog/101640.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101640 -summary: Support cross clusters query in ESQL -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101656.yaml b/docs/changelog/101656.yaml deleted file mode 100644 index 7cd4f30cae849..0000000000000 --- a/docs/changelog/101656.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101656 -summary: Adjust interception of requests for specific shard IDs -area: Authorization -type: bug -issues: [] diff --git a/docs/changelog/101717.yaml b/docs/changelog/101717.yaml deleted file mode 100644 index 7e97ef1049f88..0000000000000 --- a/docs/changelog/101717.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101717 -summary: Pause shard snapshots on graceful shutdown -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/101872.yaml b/docs/changelog/101872.yaml deleted file mode 100644 index 1c63c2d8b009a..0000000000000 --- a/docs/changelog/101872.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101872 -summary: "Add `require_data_stream` parameter to indexing requests to enforce indexing operations target a data stream" -area: Data streams -type: feature -issues: - - 97032 diff --git a/docs/changelog/102078.yaml b/docs/changelog/102078.yaml deleted file mode 100644 index d031aa0dbf6f7..0000000000000 --- a/docs/changelog/102078.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102078 -summary: Derive expected replica size from primary -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/102207.yaml b/docs/changelog/102207.yaml deleted file mode 100644 index 8b247828845f4..0000000000000 --- a/docs/changelog/102207.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102207 -summary: Fix disk computation when initializing unassigned shards in desired balance - computation -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/102371.yaml b/docs/changelog/102371.yaml deleted file mode 100644 index 5a698bc9d671a..0000000000000 --- a/docs/changelog/102371.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102371 -summary: Adding threadpool metrics -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/102428.yaml b/docs/changelog/102428.yaml deleted file mode 100644 index 275492fa6a888..0000000000000 --- a/docs/changelog/102428.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102428 -summary: "ESQL: Add 
option to drop null fields" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102435.yaml b/docs/changelog/102435.yaml deleted file mode 100644 index e8905b08f1adc..0000000000000 --- a/docs/changelog/102435.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102435 -summary: S3 first byte latency metric -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102557.yaml b/docs/changelog/102557.yaml deleted file mode 100644 index dfca1763064d4..0000000000000 --- a/docs/changelog/102557.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102557 -summary: Metrics for search latencies -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102559.yaml b/docs/changelog/102559.yaml deleted file mode 100644 index ad0867ab087b9..0000000000000 --- a/docs/changelog/102559.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102559 -summary: "Prune unnecessary information from TransportNodesStatsAction.NodeStatsRequest" -area: Network -type: enhancement -issues: [100878] diff --git a/docs/changelog/102584.yaml b/docs/changelog/102584.yaml deleted file mode 100644 index 44ff5dd9f7461..0000000000000 --- a/docs/changelog/102584.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102584 -summary: Expose some ML metrics via APM -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102726.yaml b/docs/changelog/102726.yaml deleted file mode 100644 index bc5b311481123..0000000000000 --- a/docs/changelog/102726.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102726 -summary: Resolve Cluster API -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102759.yaml b/docs/changelog/102759.yaml deleted file mode 100644 index 1c002ef2b678e..0000000000000 --- a/docs/changelog/102759.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102759 -summary: Close rather than stop `HttpServerTransport` on shutdown -area: Infra/Node Lifecycle -type: bug -issues: - - 102501 diff --git a/docs/changelog/102765.yaml b/docs/changelog/102765.yaml deleted file mode 100644 index eb73da2650542..0000000000000 --- a/docs/changelog/102765.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102765 -summary: "Add APM metrics to `HealthPeriodicLogger`" -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/102782.yaml b/docs/changelog/102782.yaml deleted file mode 100644 index ed0a004765859..0000000000000 --- a/docs/changelog/102782.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102782 -summary: Upgrade to Lucene 9.9.0 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/102798.yaml b/docs/changelog/102798.yaml deleted file mode 100644 index 986ad99f96a19..0000000000000 --- a/docs/changelog/102798.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102798 -summary: Hot-reloadable remote cluster credentials -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/102824.yaml b/docs/changelog/102824.yaml deleted file mode 100644 index 21b39a4c3999d..0000000000000 --- a/docs/changelog/102824.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102824 -summary: Change detection aggregation improvements -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102862.yaml b/docs/changelog/102862.yaml deleted file mode 100644 index bb453163009d5..0000000000000 --- a/docs/changelog/102862.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102862 -summary: Add optional pruning configuration (weighted terms scoring) to text expansion query -area: "Machine Learning" -type: enhancement -issues: [] diff --git a/docs/changelog/102879.yaml b/docs/changelog/102879.yaml deleted file mode 100644 index 
b35d36dd0a3a9..0000000000000 --- a/docs/changelog/102879.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102879 -summary: Fix disk computation when initializing new shards -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/102885.yaml b/docs/changelog/102885.yaml deleted file mode 100644 index 7a998c3eb1f66..0000000000000 --- a/docs/changelog/102885.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102885 -summary: Make field limit more predictable -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/103025.yaml b/docs/changelog/103025.yaml deleted file mode 100644 index 856a7c022d5dd..0000000000000 --- a/docs/changelog/103025.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103025 -summary: "Metrics: Allow `AsyncCounters` to switch providers" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/103032.yaml b/docs/changelog/103032.yaml deleted file mode 100644 index 81d84fca0bdb0..0000000000000 --- a/docs/changelog/103032.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103032 -summary: "x-pack/plugin/apm-data: Map some APM fields as flattened and fix error.grouping_name script" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/103033.yaml b/docs/changelog/103033.yaml deleted file mode 100644 index 30f8e182b9998..0000000000000 --- a/docs/changelog/103033.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103033 -summary: "X-pack/plugin/core: rename `double_metrics` template" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/103035.yaml b/docs/changelog/103035.yaml deleted file mode 100644 index 5b1c9d6629767..0000000000000 --- a/docs/changelog/103035.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103035 -summary: "x-pack/plugin/core: add `match_mapping_type` to `ecs@mappings` dynamic templates" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/103084.yaml b/docs/changelog/103084.yaml deleted file mode 100644 index fb5a718a086de..0000000000000 --- a/docs/changelog/103084.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103084 -summary: Return `matched_queries` in Percolator -area: Percolator -type: enhancement -issues: - - 10163 diff --git a/docs/changelog/103091.yaml b/docs/changelog/103091.yaml deleted file mode 100644 index ae4ac11933d4e..0000000000000 --- a/docs/changelog/103091.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103091 -summary: "Metrics: Handle null observations in observers" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/103099.yaml b/docs/changelog/103099.yaml deleted file mode 100644 index c3fd3f9d7b8e4..0000000000000 --- a/docs/changelog/103099.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103099 -summary: "ESQL: Simpify IS NULL/IS NOT NULL evaluation" -area: ES|QL -type: enhancement -issues: - - 103097 diff --git a/docs/changelog/103130.yaml b/docs/changelog/103130.yaml deleted file mode 100644 index 3ef56ae84d123..0000000000000 --- a/docs/changelog/103130.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103130 -summary: Create a DSL health indicator as part of the health API -area: Health -type: feature -issues: [] diff --git a/docs/changelog/103160.yaml b/docs/changelog/103160.yaml deleted file mode 100644 index 7701aa2b4a8d4..0000000000000 --- a/docs/changelog/103160.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103160 -summary: Set thread name used by REST client -area: Java Low Level REST Client -type: enhancement -issues: [] diff --git a/docs/changelog/103171.yaml b/docs/changelog/103171.yaml deleted file mode 100644 index 95ad6a1ea77c2..0000000000000 --- a/docs/changelog/103171.yaml +++ /dev/null 
@@ -1,7 +0,0 @@ -pr: 103171 -summary: "Add `unmatch_mapping_type`, and support array of types" -area: Mapping -type: feature -issues: - - 102807 - - 102795 diff --git a/docs/changelog/103176.yaml b/docs/changelog/103176.yaml deleted file mode 100644 index a0f46c1462f62..0000000000000 --- a/docs/changelog/103176.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103176 -summary: Validate settings in `ReloadSecureSettings` API -area: Client -type: bug -issues: [] diff --git a/docs/changelog/103178.yaml b/docs/changelog/103178.yaml deleted file mode 100644 index 5da0221a68984..0000000000000 --- a/docs/changelog/103178.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103178 -summary: Expose API key authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/103190.yaml b/docs/changelog/103190.yaml deleted file mode 100644 index 5e6927d3eadd7..0000000000000 --- a/docs/changelog/103190.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103190 -summary: ILM/SLM history policies forcemerge in hot and dsl configuration -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/103223.yaml b/docs/changelog/103223.yaml deleted file mode 100644 index c2f4c1b6a2cf4..0000000000000 --- a/docs/changelog/103223.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 103223 -summary: "[Synonyms] Mark Synonyms as GA" -area: "Search" -type: feature -issues: [] -highlight: - title: "GA Release of Synonyms API" - body: |- - Removes the beta label for the Synonyms API to make it GA. - notable: true diff --git a/docs/changelog/103232.yaml b/docs/changelog/103232.yaml deleted file mode 100644 index b955e7abb7683..0000000000000 --- a/docs/changelog/103232.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103232 -summary: "Remove leniency in msearch parsing" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/103300.yaml b/docs/changelog/103300.yaml deleted file mode 100644 index a536a673b7827..0000000000000 --- a/docs/changelog/103300.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103300 -summary: Retry indefinitely for s3 indices blob read errors -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/103309.yaml b/docs/changelog/103309.yaml deleted file mode 100644 index 94b2a31127870..0000000000000 --- a/docs/changelog/103309.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103309 -summary: Introduce lazy rollover for mapping updates in data streams -area: Data streams -type: enhancement -issues: - - 89346 diff --git a/docs/changelog/103310.yaml b/docs/changelog/103310.yaml deleted file mode 100644 index a7a0746b6b8c4..0000000000000 --- a/docs/changelog/103310.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103310 -summary: Revert "Validate settings in `ReloadSecureSettings` API" -area: Security -type: bug -issues: [] diff --git a/docs/changelog/103316.yaml b/docs/changelog/103316.yaml deleted file mode 100644 index 47eddcc34d924..0000000000000 --- a/docs/changelog/103316.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103316 -summary: Review KEEP logic to prevent duplicate column names -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/103325.yaml b/docs/changelog/103325.yaml deleted file mode 100644 index 7de6c41986490..0000000000000 --- a/docs/changelog/103325.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103325 -summary: Added Duplicate Word Check Feature to Analysis Nori -area: Search -type: feature -issues: - - 103321 diff --git a/docs/changelog/103340.yaml b/docs/changelog/103340.yaml deleted file mode 100644 index 21280dbfc857d..0000000000000 --- a/docs/changelog/103340.yaml +++ 
/dev/null @@ -1,5 +0,0 @@ -pr: 103340 -summary: Avoid humongous blocks -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103387.yaml b/docs/changelog/103387.yaml deleted file mode 100644 index 77239fb9a3778..0000000000000 --- a/docs/changelog/103387.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103387 -summary: Upgrade to Lucene 9.9.1 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/103398.yaml b/docs/changelog/103398.yaml deleted file mode 100644 index 69452616ddc99..0000000000000 --- a/docs/changelog/103398.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103398 -summary: ES|QL Async Query API -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103399.yaml b/docs/changelog/103399.yaml deleted file mode 100644 index 440ac90b313f5..0000000000000 --- a/docs/changelog/103399.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103399 -summary: "add validation on _id field when upsert new doc" -area: Search -type: bug -issues: - - 102981 diff --git a/docs/changelog/103434.yaml b/docs/changelog/103434.yaml deleted file mode 100644 index 56af604fe08f7..0000000000000 --- a/docs/changelog/103434.yaml +++ /dev/null @@ -1,11 +0,0 @@ -pr: 103434 -summary: Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. -area: TSDB -type: breaking -issues: [] -breaking: - title: Lower the `look_ahead_time` index setting's max value - area: Index setting - details: "Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours." - impact: "Any value between 2 hours and 7 days will be as a look ahead time of 2 hours is defined" - notable: false diff --git a/docs/changelog/103453.yaml b/docs/changelog/103453.yaml deleted file mode 100644 index 4b7dab77c8b23..0000000000000 --- a/docs/changelog/103453.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103453 -summary: Add expiration time to update api key api -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103461.yaml b/docs/changelog/103461.yaml deleted file mode 100644 index 3a1bf30aa90c9..0000000000000 --- a/docs/changelog/103461.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103461 -summary: Add support for Well Known Binary (WKB) in the fields API for spatial fields -area: Geo -type: enhancement -issues: [] diff --git a/docs/changelog/103481.yaml b/docs/changelog/103481.yaml deleted file mode 100644 index f7c7c0b6eecc9..0000000000000 --- a/docs/changelog/103481.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103481 -summary: Redirect failed ingest node operations to a failure store when available -area: Data streams -type: feature -issues: [] diff --git a/docs/changelog/103510.yaml b/docs/changelog/103510.yaml deleted file mode 100644 index 50ec8efd5c440..0000000000000 --- a/docs/changelog/103510.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103510 -summary: "ES|QL: better management of exact subfields for TEXT fields" -area: ES|QL -type: bug -issues: - - 99899 diff --git a/docs/changelog/103520.yaml b/docs/changelog/103520.yaml deleted file mode 100644 index 0ef7124eb1ed2..0000000000000 --- a/docs/changelog/103520.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103520 -summary: Request indexing memory pressure in APM node metrics publisher -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/103535.yaml b/docs/changelog/103535.yaml deleted file mode 100644 index 80cf6e1ea709a..0000000000000 --- a/docs/changelog/103535.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103535 -summary: Add replay diagnostic dir to system jvm options -area: Infra/CLI -type: enhancement -issues: [] diff --git 
a/docs/changelog/103538.yaml b/docs/changelog/103538.yaml deleted file mode 100644 index 5aaed771d5ee4..0000000000000 --- a/docs/changelog/103538.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103538 -summary: "ESQL: Improve pushdown of certain filters" -area: ES|QL -type: bug -issues: - - 103536 diff --git a/docs/changelog/103555.yaml b/docs/changelog/103555.yaml deleted file mode 100644 index 2b0dc2692e252..0000000000000 --- a/docs/changelog/103555.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103555 -summary: "[Security Solution] Allow write permission for `kibana_system` role on endpoint\ - \ response index" -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/103592.yaml b/docs/changelog/103592.yaml deleted file mode 100644 index 21e06f1f5a10d..0000000000000 --- a/docs/changelog/103592.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103592 -summary: Remove deprecated Block APIs -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103610.yaml b/docs/changelog/103610.yaml deleted file mode 100644 index 1ed38cc2822bd..0000000000000 --- a/docs/changelog/103610.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103610 -summary: "ESQL: allow `null` in date math" -area: ES|QL -type: bug -issues: - - 103085 diff --git a/docs/changelog/103627.yaml b/docs/changelog/103627.yaml deleted file mode 100644 index 4b0d9e937542e..0000000000000 --- a/docs/changelog/103627.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103627 -summary: Add gradle tasks and code to modify and access mappings between version ids and release versions -area: Infra/Core -type: feature -issues: [] diff --git a/docs/changelog/103628.yaml b/docs/changelog/103628.yaml deleted file mode 100644 index 42259c7bcde46..0000000000000 --- a/docs/changelog/103628.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103628 -summary: Add ES|QL async delete API -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103632.yaml b/docs/changelog/103632.yaml deleted file mode 100644 index 1d83c6528f371..0000000000000 --- a/docs/changelog/103632.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103632 -summary: "ESQL: Check field exists before load from `_source`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103633.yaml b/docs/changelog/103633.yaml deleted file mode 100644 index 9e36451caafd8..0000000000000 --- a/docs/changelog/103633.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103633 -summary: Update s3 latency metric to use micros -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/103643.yaml b/docs/changelog/103643.yaml deleted file mode 100644 index 966fb57acf566..0000000000000 --- a/docs/changelog/103643.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103643 -summary: "[Profiling] Use shard request cache consistently" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/103646.yaml b/docs/changelog/103646.yaml deleted file mode 100644 index b7a6fae025771..0000000000000 --- a/docs/changelog/103646.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103646 -summary: Add index mapping parameter for `counted_keyword` -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/103648.yaml b/docs/changelog/103648.yaml deleted file mode 100644 index d4fa489a6812c..0000000000000 --- a/docs/changelog/103648.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103648 -summary: Introduce experimental pass-through field type -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/103651.yaml b/docs/changelog/103651.yaml deleted file mode 100644 index 1106044b31fd2..0000000000000 
--- a/docs/changelog/103651.yaml +++ /dev/null @@ -1,12 +0,0 @@ -pr: 103651 -summary: Flag in `_field_caps` to return only fields with values in index -area: Search -type: enhancement -issues: [] -highlight: - title: Flag in `_field_caps` to return only fields with values in index - body: |- - We added support for filtering the field capabilities API output by removing - fields that don't have a value. This can be done through the newly added - `include_empty_fields` parameter, which defaults to true. - notable: true diff --git a/docs/changelog/103656.yaml b/docs/changelog/103656.yaml deleted file mode 100644 index 24bd8814029ff..0000000000000 --- a/docs/changelog/103656.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103656 -summary: "ESQL: add =~ operator (case insensitive equality)" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/103669.yaml b/docs/changelog/103669.yaml deleted file mode 100644 index 57361b9d842e4..0000000000000 --- a/docs/changelog/103669.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103669 -summary: Validate inference model ids -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/103673.yaml b/docs/changelog/103673.yaml deleted file mode 100644 index f786b57eba411..0000000000000 --- a/docs/changelog/103673.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103673 -summary: "ESQL: Infer not null for aggregated fields" -area: ES|QL -type: enhancement -issues: - - 102787 diff --git a/docs/changelog/103681.yaml b/docs/changelog/103681.yaml deleted file mode 100644 index bba73c8e3a7d4..0000000000000 --- a/docs/changelog/103681.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103681 -summary: "ESQL: Expand shallow copy with vecs" -area: ES|QL -type: enhancement -issues: - - 100528 diff --git a/docs/changelog/103682.yaml b/docs/changelog/103682.yaml deleted file mode 100644 index 109e77dd053a5..0000000000000 --- a/docs/changelog/103682.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103682 -summary: Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` -area: Transform -type: bug -issues: - - 103115 diff --git a/docs/changelog/103698.yaml b/docs/changelog/103698.yaml deleted file mode 100644 index d94b70b54e505..0000000000000 --- a/docs/changelog/103698.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103698 -summary: Reading points from source to reduce precision loss -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103710.yaml b/docs/changelog/103710.yaml deleted file mode 100644 index 539b9f553ccc2..0000000000000 --- a/docs/changelog/103710.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103710 -summary: List hidden shard stores by default -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/103720.yaml b/docs/changelog/103720.yaml deleted file mode 100644 index e0ee879988fa7..0000000000000 --- a/docs/changelog/103720.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103720 -summary: Add "step":"ERROR" to ILM explain response for missing policy -area: ILM+SLM -type: enhancement -issues: - - 99030 diff --git a/docs/changelog/103727.yaml b/docs/changelog/103727.yaml deleted file mode 100644 index f943ee7906d58..0000000000000 --- a/docs/changelog/103727.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103727 -summary: "ESQL: Track the rest of `DocVector`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103763.yaml b/docs/changelog/103763.yaml deleted file mode 100644 index e4d6556c77077..0000000000000 --- a/docs/changelog/103763.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103763 -summary: Ref count 
search response bytes -area: Search -type: enhancement -issues: - - 102657 diff --git a/docs/changelog/103783.yaml b/docs/changelog/103783.yaml deleted file mode 100644 index 47c32dd639310..0000000000000 --- a/docs/changelog/103783.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103783 -summary: "[Profiling] Mark all templates as managed" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/103807.yaml b/docs/changelog/103807.yaml deleted file mode 100644 index 3849edcc00ced..0000000000000 --- a/docs/changelog/103807.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103807 -summary: "ESQL: Add single value checks on LIKE/RLIKE pushdown" -area: ES|QL -type: bug -issues: - - 103806 diff --git a/docs/changelog/103821.yaml b/docs/changelog/103821.yaml deleted file mode 100644 index 3279059acbe3e..0000000000000 --- a/docs/changelog/103821.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103821 -summary: "ESQL: Delay finding field load infrastructure" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103846.yaml b/docs/changelog/103846.yaml deleted file mode 100644 index 0d34efabc0278..0000000000000 --- a/docs/changelog/103846.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103846 -summary: Support sampling in `counted_terms` aggregation -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/103898.yaml b/docs/changelog/103898.yaml deleted file mode 100644 index 73d89e49e8812..0000000000000 --- a/docs/changelog/103898.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 103898 -summary: Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. -area: TSDB -type: breaking -issues: [] -breaking: - title: Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. - area: Index setting - details: Lower the `index.look_ahead_time` index setting's max value from 2 hours to 30 minutes. - impact: > - Documents with @timestamp of 30 minutes or more in the future will be rejected. - Before documents with @timestamp of 2 hours or more in the future were rejected. - If the previous behaviour should be kept, then update the `index.look_ahead_time` setting to two hours before performing the upgrade. 
- notable: false diff --git a/docs/changelog/103903.yaml b/docs/changelog/103903.yaml deleted file mode 100644 index c2e5e710ac439..0000000000000 --- a/docs/changelog/103903.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103903 -summary: Account for reserved disk size -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/103920.yaml b/docs/changelog/103920.yaml deleted file mode 100644 index c4a0d3b06fc82..0000000000000 --- a/docs/changelog/103920.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103920 -summary: Use search to determine if cluster contains data -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103922.yaml b/docs/changelog/103922.yaml deleted file mode 100644 index 4181a6e6b1e8a..0000000000000 --- a/docs/changelog/103922.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103922 -summary: Always test for spikes and dips as well as changes in the change point aggregation -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/103928.yaml b/docs/changelog/103928.yaml deleted file mode 100644 index a9e60ba33a686..0000000000000 --- a/docs/changelog/103928.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103928 -summary: "ESQL: `MV_FIRST` and `MV_LAST`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103948.yaml b/docs/changelog/103948.yaml deleted file mode 100644 index 3247183fc97bb..0000000000000 --- a/docs/changelog/103948.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103948 -summary: '''elasticsearch-certutil cert'' now verifies the issuing chain of the generated - certificate' -area: TLS -type: enhancement -issues: [] diff --git a/docs/changelog/103949.yaml b/docs/changelog/103949.yaml deleted file mode 100644 index 96bd76d89ceae..0000000000000 --- a/docs/changelog/103949.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103949 -summary: "ESQL: Introduce mode setting for ENRICH" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/103959.yaml b/docs/changelog/103959.yaml deleted file mode 100644 index 4c8b4413b95f8..0000000000000 --- a/docs/changelog/103959.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103959 -summary: Add `ApiKey` expiration time to audit log -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103973.yaml b/docs/changelog/103973.yaml deleted file mode 100644 index f3bde76c7a559..0000000000000 --- a/docs/changelog/103973.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103973 -summary: Add stricter validation for api key expiration time -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103996.yaml b/docs/changelog/103996.yaml deleted file mode 100644 index 699b93fff4f03..0000000000000 --- a/docs/changelog/103996.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103996 -summary: Ensure unique IDs between inference models and trained model deployments -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104006.yaml b/docs/changelog/104006.yaml deleted file mode 100644 index d840502cdefbe..0000000000000 --- a/docs/changelog/104006.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104006 -summary: Add support for more than one `inner_hit` when searching nested vectors -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/104030.yaml b/docs/changelog/104030.yaml deleted file mode 100644 index 8fe30e6258653..0000000000000 --- a/docs/changelog/104030.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104030 -summary: Add the possibility to transform WKT to WKB directly -area: Geo -type: enhancement -issues: [] diff --git a/docs/changelog/104033.yaml 
b/docs/changelog/104033.yaml deleted file mode 100644 index d3e167665732c..0000000000000 --- a/docs/changelog/104033.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104033 -summary: Add Query Users API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104043.yaml b/docs/changelog/104043.yaml deleted file mode 100644 index 86032e52fe208..0000000000000 --- a/docs/changelog/104043.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104043 -summary: Expose service account authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104063.yaml b/docs/changelog/104063.yaml deleted file mode 100644 index 5f59022472c75..0000000000000 --- a/docs/changelog/104063.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104063 -summary: Add serverless scopes for Connector APIs -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104077.yaml b/docs/changelog/104077.yaml deleted file mode 100644 index 7550e7388a29d..0000000000000 --- a/docs/changelog/104077.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104077 -summary: Retry updates to model snapshot ID on job config -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104091.yaml b/docs/changelog/104091.yaml deleted file mode 100644 index 42609e42471f8..0000000000000 --- a/docs/changelog/104091.yaml +++ /dev/null @@ -1,11 +0,0 @@ -pr: 104091 -summary: "[ESQL] Remove is_nan, is_finite, and `is_infinite`" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "[ESQL] Remove is_nan, is_finite, and `is_infinite`" - area: REST API - details: Removes the functions `is_nan`, `is_finite`, and `is_infinite`. - impact: Attempting to use the above functions will now be a planner time error. These functions are no longer supported. - notable: false diff --git a/docs/changelog/104092.yaml b/docs/changelog/104092.yaml deleted file mode 100644 index b40637d51765e..0000000000000 --- a/docs/changelog/104092.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104092 -summary: Ingest geoip processor cache 'no results' from the database -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104099.yaml b/docs/changelog/104099.yaml deleted file mode 100644 index b4164896a5923..0000000000000 --- a/docs/changelog/104099.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104099 -summary: Fix `require_alias` implicit true value on presence -area: Indices APIs -type: bug -issues: - - 103945 diff --git a/docs/changelog/104113.yaml b/docs/changelog/104113.yaml deleted file mode 100644 index 3068291606578..0000000000000 --- a/docs/changelog/104113.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104113 -summary: "X-pack/plugin/apm-data: fix `@custom` pipeline support" -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/104118.yaml b/docs/changelog/104118.yaml deleted file mode 100644 index f5afb199bc5eb..0000000000000 --- a/docs/changelog/104118.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104118 -summary: "ESQL: add `date_diff` function" -area: ES|QL -type: enhancement -issues: - - 101942 diff --git a/docs/changelog/104122.yaml b/docs/changelog/104122.yaml deleted file mode 100644 index a88d7499bd44e..0000000000000 --- a/docs/changelog/104122.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104122 -summary: Consider currently refreshing data in the memory usage of refresh -area: Engine -type: bug -issues: [] diff --git a/docs/changelog/104132.yaml b/docs/changelog/104132.yaml deleted file mode 100644 index 87fe94ddcfcea..0000000000000 --- a/docs/changelog/104132.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 
104132 -summary: Add support for the `simple_query_string` to the Query API Key API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104142.yaml b/docs/changelog/104142.yaml deleted file mode 100644 index 08bf9ef759090..0000000000000 --- a/docs/changelog/104142.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104142 -summary: Expose token authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104150.yaml b/docs/changelog/104150.yaml deleted file mode 100644 index c910542dcf7f6..0000000000000 --- a/docs/changelog/104150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104150 -summary: Correct profiled rewrite time for knn with a pre-filter -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104155.yaml b/docs/changelog/104155.yaml deleted file mode 100644 index 04d6a9920310a..0000000000000 --- a/docs/changelog/104155.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104155 -summary: "Updated `missingTrainedModel` message to include: you may need to create\ - \ it" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104182.yaml b/docs/changelog/104182.yaml deleted file mode 100644 index b5cf10f941cc6..0000000000000 --- a/docs/changelog/104182.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104182 -summary: "Apm-data: fix `@custom` component templates" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/104200.yaml b/docs/changelog/104200.yaml deleted file mode 100644 index bc2aa2507f0ec..0000000000000 --- a/docs/changelog/104200.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104200 -summary: Expose realms authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104209.yaml b/docs/changelog/104209.yaml deleted file mode 100644 index fabf06fb99c2e..0000000000000 --- a/docs/changelog/104209.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 104209 -summary: '`DesiredNode:` deprecate `node_version` field and make it optional (unused) - in current parser' -area: Distributed -type: deprecation -issues: [] -deprecation: - title: '`DesiredNode:` deprecate `node_version` field and make it optional for the current version' - area: REST API - details: The desired_node API includes a `node_version` field to perform validation on the new node version required. - This kind of check is too broad, and it's better done by external logic, so it has been removed, making the - `node_version` field not necessary. The field will be removed in a later version. - impact: Users should update their usages of `desired_node` to not include the `node_version` field anymore. 
diff --git a/docs/changelog/104218.yaml b/docs/changelog/104218.yaml deleted file mode 100644 index b3051008dc47b..0000000000000 --- a/docs/changelog/104218.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104218 -summary: "Support ST_CENTROID over spatial points" -area: "ES|QL" -type: enhancement -issues: - - 104656 diff --git a/docs/changelog/104227.yaml b/docs/changelog/104227.yaml deleted file mode 100644 index 64dcf844f23f2..0000000000000 --- a/docs/changelog/104227.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104227 -summary: Avoid wrapping searchers multiple times in mget -area: CRUD -type: enhancement -issues: - - 85069 diff --git a/docs/changelog/104230.yaml b/docs/changelog/104230.yaml deleted file mode 100644 index 94184f64586f5..0000000000000 --- a/docs/changelog/104230.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104230 -summary: Undeploy elser when inference model deleted -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104265.yaml b/docs/changelog/104265.yaml deleted file mode 100644 index 88c3d72ee81d0..0000000000000 --- a/docs/changelog/104265.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104265 -summary: Remove `hashCode` and `equals` from `OperationModeUpdateTask` -area: ILM+SLM -type: bug -issues: - - 100871 diff --git a/docs/changelog/104269.yaml b/docs/changelog/104269.yaml deleted file mode 100644 index 8d4b0fc5d5198..0000000000000 --- a/docs/changelog/104269.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104269 -summary: "ESQL: Support loading shapes from source into WKB blocks" -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/104309.yaml b/docs/changelog/104309.yaml deleted file mode 100644 index 4467eb6722afc..0000000000000 --- a/docs/changelog/104309.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104309 -summary: "ESQL: Add TO_UPPER and TO_LOWER functions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104320.yaml b/docs/changelog/104320.yaml deleted file mode 100644 index d2b0d09070fb9..0000000000000 --- a/docs/changelog/104320.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104320 -summary: Hot-reloadable LDAP bind password -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104334.yaml b/docs/changelog/104334.yaml deleted file mode 100644 index ff242ee15141b..0000000000000 --- a/docs/changelog/104334.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104334 -summary: Automatically download the ELSER model when PUT in `_inference` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104355.yaml b/docs/changelog/104355.yaml deleted file mode 100644 index 2a100faf3c35f..0000000000000 --- a/docs/changelog/104355.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104355 -summary: Prepare enrich plan to support multi clusters -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104356.yaml b/docs/changelog/104356.yaml deleted file mode 100644 index e0cb2311fbfc9..0000000000000 --- a/docs/changelog/104356.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104356 -summary: "[Profiling] Extract properties faster from source" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104363.yaml b/docs/changelog/104363.yaml deleted file mode 100644 index 9d97991ea7fab..0000000000000 --- a/docs/changelog/104363.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104363 -summary: Apply windowing and chunking to long documents -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104386.yaml b/docs/changelog/104386.yaml deleted file mode 100644 index 
41b6a17424bbd..0000000000000 --- a/docs/changelog/104386.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104386 -summary: "X-pack/plugin/apm-data: add dynamic setting for enabling template registry" -area: Data streams -type: enhancement -issues: - - 104385 diff --git a/docs/changelog/104387.yaml b/docs/changelog/104387.yaml deleted file mode 100644 index f10084d8c4b32..0000000000000 --- a/docs/changelog/104387.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104387 -summary: "ESQL: Nested expressions inside stats command" -area: ES|QL -type: enhancement -issues: - - 99828 diff --git a/docs/changelog/104394.yaml b/docs/changelog/104394.yaml deleted file mode 100644 index 39fbfc0c4ea28..0000000000000 --- a/docs/changelog/104394.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104394 -summary: Endpoint to find positions of Grok pattern matches -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104396.yaml b/docs/changelog/104396.yaml deleted file mode 100644 index 586fdc1b22624..0000000000000 --- a/docs/changelog/104396.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104396 -summary: Report current master in `PeerFinder` -area: Cluster Coordination -type: enhancement -issues: [] diff --git a/docs/changelog/104406.yaml b/docs/changelog/104406.yaml deleted file mode 100644 index d26ef664abc07..0000000000000 --- a/docs/changelog/104406.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104406 -summary: Support patch transport version from 8.12 -area: Downsampling -type: enhancement -issues: [] diff --git a/docs/changelog/104407.yaml b/docs/changelog/104407.yaml deleted file mode 100644 index 1ce6b6f97f580..0000000000000 --- a/docs/changelog/104407.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104407 -summary: Set read timeout for fetching IMDSv2 token -area: Discovery-Plugins -type: enhancement -issues: - - 104244 diff --git a/docs/changelog/104408.yaml b/docs/changelog/104408.yaml deleted file mode 100644 index 7303740168ea5..0000000000000 --- a/docs/changelog/104408.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104408 -summary: Move `TransportTermsEnumAction` coordination off transport threads -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104433.yaml b/docs/changelog/104433.yaml deleted file mode 100644 index b3b292923e290..0000000000000 --- a/docs/changelog/104433.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104433 -summary: Added 3 automatic restarts for `pytorch_inference` processes which stop unexpectedly -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104440.yaml b/docs/changelog/104440.yaml deleted file mode 100644 index 4242b7786f05f..0000000000000 --- a/docs/changelog/104440.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104440 -summary: Fix write index resolution when an alias is pointing to a TSDS -area: Data streams -type: bug -issues: - - 104189 diff --git a/docs/changelog/104460.yaml b/docs/changelog/104460.yaml deleted file mode 100644 index c92acdd5cb8ad..0000000000000 --- a/docs/changelog/104460.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104460 -summary: Dyamically adjust node metrics cache expire -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/104483.yaml b/docs/changelog/104483.yaml deleted file mode 100644 index 99917b4e8e017..0000000000000 --- a/docs/changelog/104483.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104483 -summary: Make `task_type` optional in `_inference` APIs -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104500.yaml b/docs/changelog/104500.yaml deleted file mode 100644 index 
61c45c6dde3cb..0000000000000 --- a/docs/changelog/104500.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104500 -summary: Thread pool metrics -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/104505.yaml b/docs/changelog/104505.yaml deleted file mode 100644 index 4d0c482a88d85..0000000000000 --- a/docs/changelog/104505.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104505 -summary: "Revert \"x-pack/plugin/apm-data: download geoip DB on pipeline creation\"" -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/104529.yaml b/docs/changelog/104529.yaml deleted file mode 100644 index 5b223a0924d86..0000000000000 --- a/docs/changelog/104529.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104529 -summary: Add rest spec for Query User API -area: Client -type: enhancement -issues: [] diff --git a/docs/changelog/104553.yaml b/docs/changelog/104553.yaml deleted file mode 100644 index e1f5c974bd74e..0000000000000 --- a/docs/changelog/104553.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104553 -summary: "ESQL: Fix a bug loading unindexed text fields" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104559.yaml b/docs/changelog/104559.yaml deleted file mode 100644 index d6d030783c4cc..0000000000000 --- a/docs/changelog/104559.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104559 -summary: Adding support for Cohere inference service -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104573.yaml b/docs/changelog/104573.yaml deleted file mode 100644 index a333bc3024772..0000000000000 --- a/docs/changelog/104573.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104573 -summary: Fix logger Strings.format calls -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/104574.yaml b/docs/changelog/104574.yaml deleted file mode 100644 index 68be002142fd9..0000000000000 --- a/docs/changelog/104574.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 104574 -summary: Deprecate `client.type` -area: Infra/Core -type: deprecation -issues: [] -deprecation: - title: Deprecate `client.type` - area: Cluster and node setting - details: The node setting `client.type` has been ignored since the node client was removed in 8.0. The setting is now deprecated and will be removed in a future release. 
- impact: Remove the `client.type` setting from `elasticsearch.yml` diff --git a/docs/changelog/104575.yaml b/docs/changelog/104575.yaml deleted file mode 100644 index ba17b705fca10..0000000000000 --- a/docs/changelog/104575.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104575 -summary: Introduce Alias.unwrap method -area: "Query Languages" -type: enhancement -issues: [] diff --git a/docs/changelog/104581.yaml b/docs/changelog/104581.yaml deleted file mode 100644 index 5f9b71acbfed7..0000000000000 --- a/docs/changelog/104581.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104581 -summary: Fix bogus assertion tripped by force-executed tasks -area: Infra/Core -type: bug -issues: - - 104580 diff --git a/docs/changelog/104594.yaml b/docs/changelog/104594.yaml deleted file mode 100644 index 7729eb028f68e..0000000000000 --- a/docs/changelog/104594.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104594 -summary: Support of `match` for the Query API Key API -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104614.yaml b/docs/changelog/104614.yaml deleted file mode 100644 index 9b2c25a643825..0000000000000 --- a/docs/changelog/104614.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104614 -summary: Extend `repository_integrity` health indicator for unknown and invalid repos -area: Health -type: enhancement -issues: - - 103784 diff --git a/docs/changelog/104625.yaml b/docs/changelog/104625.yaml deleted file mode 100644 index 28951936107fb..0000000000000 --- a/docs/changelog/104625.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104625 -summary: "Add support for the `type` parameter, for sorting, to the Query API Key\ - \ API" -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104636.yaml b/docs/changelog/104636.yaml deleted file mode 100644 index d74682f2eba18..0000000000000 --- a/docs/changelog/104636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104636 -summary: Modifying request builders -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104643.yaml b/docs/changelog/104643.yaml deleted file mode 100644 index 5a09cd081b376..0000000000000 --- a/docs/changelog/104643.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104643 -summary: "[Connectors API] Implement update service type action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104648.yaml b/docs/changelog/104648.yaml deleted file mode 100644 index e8bb5fea392ac..0000000000000 --- a/docs/changelog/104648.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104648 -summary: "[Connector API] Implement update `index_name` action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104654.yaml b/docs/changelog/104654.yaml deleted file mode 100644 index 1d007ad39a854..0000000000000 --- a/docs/changelog/104654.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104654 -summary: "[Connectors API] Implement update native action endpoint" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104665.yaml b/docs/changelog/104665.yaml deleted file mode 100644 index a7043cbdc9dda..0000000000000 --- a/docs/changelog/104665.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104665 -summary: Restrict usage of certain aggregations when in sort order execution is required -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/104666.yaml b/docs/changelog/104666.yaml deleted file mode 100644 index 5009052bd5b0a..0000000000000 --- a/docs/changelog/104666.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104666 -summary: Require the name field for `inner_hits` for collapse -area: Search 
-type: bug -issues: [] diff --git a/docs/changelog/104674.yaml b/docs/changelog/104674.yaml deleted file mode 100644 index 12951488f89ce..0000000000000 --- a/docs/changelog/104674.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104674 -summary: "[Profiling] Speed up processing of stacktraces" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104718.yaml b/docs/changelog/104718.yaml deleted file mode 100644 index ffe889bb28a3e..0000000000000 --- a/docs/changelog/104718.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104718 -summary: "ESQL: Fix replacement of nested expressions in aggs with multiple parameters" -area: ES|QL -type: bug -issues: - - 104706 diff --git a/docs/changelog/104721.yaml b/docs/changelog/104721.yaml deleted file mode 100644 index 3bfe8a21646c8..0000000000000 --- a/docs/changelog/104721.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104721 -summary: Add default rollover conditions to ILM explain API response -area: ILM+SLM -type: enhancement -issues: - - 103395 diff --git a/docs/changelog/104730.yaml b/docs/changelog/104730.yaml deleted file mode 100644 index fe5e2e157a004..0000000000000 --- a/docs/changelog/104730.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104730 -summary: "[Profiling] Support downsampling of generic events" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104750.yaml b/docs/changelog/104750.yaml deleted file mode 100644 index 948b19a5eaaa6..0000000000000 --- a/docs/changelog/104750.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104750 -summary: "[Connectors API] Implement connector status update action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104753.yaml b/docs/changelog/104753.yaml deleted file mode 100644 index f95fd3da44084..0000000000000 --- a/docs/changelog/104753.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104753 -summary: Upgrade to Lucene 9.9.2 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/104778.yaml b/docs/changelog/104778.yaml deleted file mode 100644 index 7dae338efc09c..0000000000000 --- a/docs/changelog/104778.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104778 -summary: Adding a `RequestBuilder` interface -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104784.yaml b/docs/changelog/104784.yaml deleted file mode 100644 index 3d60222c2aa19..0000000000000 --- a/docs/changelog/104784.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104784 -summary: "Fix blob cache race, decay, time dependency" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/104787.yaml b/docs/changelog/104787.yaml deleted file mode 100644 index 9c4ce688ce6ad..0000000000000 --- a/docs/changelog/104787.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104787 -summary: Add troubleshooting docs link to `PeerFinder` logs -area: Cluster Coordination -type: enhancement -issues: [] diff --git a/docs/changelog/104796.yaml b/docs/changelog/104796.yaml deleted file mode 100644 index a683f9ce22d49..0000000000000 --- a/docs/changelog/104796.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104796 -summary: "ESQL: Pre-allocate rows in TopNOperator" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104840.yaml b/docs/changelog/104840.yaml deleted file mode 100644 index 5b7d83a966dbc..0000000000000 --- a/docs/changelog/104840.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104840 -summary: Support enrich ANY mode in cross clusters query -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104859.yaml b/docs/changelog/104859.yaml deleted file mode 
100644 index 55e5758e31ae2..0000000000000 --- a/docs/changelog/104859.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104859 -summary: ES - document observing with rejections -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/104872.yaml b/docs/changelog/104872.yaml deleted file mode 100644 index ad70946be02ae..0000000000000 --- a/docs/changelog/104872.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104872 -summary: Add new int8_flat and flat vector index types -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/104878.yaml b/docs/changelog/104878.yaml deleted file mode 100644 index 2ae6d5c0c1da3..0000000000000 --- a/docs/changelog/104878.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104878 -summary: "Transforms: Adding basic stats API param" -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/104893.yaml b/docs/changelog/104893.yaml deleted file mode 100644 index e4685e160f8f8..0000000000000 --- a/docs/changelog/104893.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104893 -summary: Release resources in `BestBucketsDeferringCollector` earlier -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/104895.yaml b/docs/changelog/104895.yaml deleted file mode 100644 index 020dcff891f03..0000000000000 --- a/docs/changelog/104895.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104895 -summary: Aggs support for Query API Key Information API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104905.yaml b/docs/changelog/104905.yaml deleted file mode 100644 index 80e06dc3b0cf5..0000000000000 --- a/docs/changelog/104905.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104905 -summary: "Execute lazy rollover with an internal dedicated user #104732" -area: Data streams -type: bug -issues: - - 104732 diff --git a/docs/changelog/104907.yaml b/docs/changelog/104907.yaml new file mode 100644 index 0000000000000..0d8592ae29526 --- /dev/null +++ b/docs/changelog/104907.yaml @@ -0,0 +1,6 @@ +pr: 104907 +summary: Support ST_INTERSECTS between geometry column and other geometry or string +area: "ES|QL" +type: enhancement +issues: +- 104874 diff --git a/docs/changelog/104909.yaml b/docs/changelog/104909.yaml deleted file mode 100644 index 6d250c22a745a..0000000000000 --- a/docs/changelog/104909.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104909 -summary: "[Connectors API] Relax strict response parsing for get/list operations" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104911.yaml b/docs/changelog/104911.yaml deleted file mode 100644 index 17a335337e345..0000000000000 --- a/docs/changelog/104911.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 104911 -summary: "ES|QL: Improve type validation in aggs for UNSIGNED_LONG better support\ - \ for VERSION" -area: ES|QL -type: bug -issues: - - 102961 diff --git a/docs/changelog/104927.yaml b/docs/changelog/104927.yaml deleted file mode 100644 index e0e098ba10b7b..0000000000000 --- a/docs/changelog/104927.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104927 -summary: Adding `ActionRequestLazyBuilder` implementation of `RequestBuilder` -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104936.yaml b/docs/changelog/104936.yaml deleted file mode 100644 index cfa170f550681..0000000000000 --- a/docs/changelog/104936.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104936 -summary: Support enrich coordinator mode -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104943.yaml b/docs/changelog/104943.yaml deleted file mode 100644 index 
094ce66c4f994..0000000000000 --- a/docs/changelog/104943.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104943 -summary: Fix server cli to always pass through exit code -area: Infra/CLI -type: bug -issues: [] diff --git a/docs/changelog/104949.yaml b/docs/changelog/104949.yaml deleted file mode 100644 index c2682fc911f1d..0000000000000 --- a/docs/changelog/104949.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104949 -summary: Add text_embedding inference service with multilingual-e5 and custom eland models -area: Machine Learning -type: enhancement -issues: [ ] diff --git a/docs/changelog/104958.yaml b/docs/changelog/104958.yaml deleted file mode 100644 index 936342db03b45..0000000000000 --- a/docs/changelog/104958.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104958 -summary: "ESQL: Extend STATS command to support aggregate expressions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104982.yaml b/docs/changelog/104982.yaml deleted file mode 100644 index 62194aa68b80c..0000000000000 --- a/docs/changelog/104982.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104982 -summary: "[Connectors API] Add new field `api_key_secret_id` to Connector" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104993.yaml b/docs/changelog/104993.yaml deleted file mode 100644 index df9875563d5a1..0000000000000 --- a/docs/changelog/104993.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104993 -summary: Support enrich remote mode -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104996.yaml b/docs/changelog/104996.yaml deleted file mode 100644 index b94711111adfe..0000000000000 --- a/docs/changelog/104996.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104996 -summary: "Enhancement: Metrics for Search Took Times using Action Listeners" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105015.yaml b/docs/changelog/105015.yaml deleted file mode 100644 index 94ffc2b0e58d5..0000000000000 --- a/docs/changelog/105015.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105015 -summary: Modify name of threadpool metric for rejected -area: Infra/Metrics -type: enhancement -issues: [] diff --git a/docs/changelog/105044.yaml b/docs/changelog/105044.yaml deleted file mode 100644 index 5a9a11f928f98..0000000000000 --- a/docs/changelog/105044.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105044 -summary: Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/105055.yaml b/docs/changelog/105055.yaml deleted file mode 100644 index 0db70a6b9e558..0000000000000 --- a/docs/changelog/105055.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105055 -summary: "Do not enable APM agent 'instrument', it's not required for manual tracing" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/105062.yaml b/docs/changelog/105062.yaml deleted file mode 100644 index 928786f62381a..0000000000000 --- a/docs/changelog/105062.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105062 -summary: Nest pass-through objects within objects -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/105064.yaml b/docs/changelog/105064.yaml deleted file mode 100644 index 81c62b3148f1c..0000000000000 --- a/docs/changelog/105064.yaml +++ /dev/null @@ -1,17 +0,0 @@ -pr: 105064 -summary: "ES|QL: remove PROJECT keyword from the grammar" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "ES|QL: remove PROJECT keyword from the grammar" - area: REST API - details: "Removes the PROJECT keyword (an alias for KEEP) from ES|QL 
grammar" - impact: "Before this change, users could use PROJECT as an alias for KEEP in ESQL queries,\ - \ (eg. 'FROM idx | PROJECT name, surname')\ - \ the parser replaced PROJECT with KEEP, emitted a warning:\ - \ 'PROJECT command is no longer supported, please use KEEP instead'\ - \ and the query was executed normally.\ - \ With this change, PROJECT command is no longer recognized by the query parser;\ - \ queries using PROJECT command now return a parsing exception." - notable: false diff --git a/docs/changelog/105081.yaml b/docs/changelog/105081.yaml deleted file mode 100644 index efa686bd7b4a4..0000000000000 --- a/docs/changelog/105081.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105081 -summary: For empty mappings use a `LocalRelation` -area: ES|QL -type: bug -issues: - - 104809 diff --git a/docs/changelog/105088.yaml b/docs/changelog/105088.yaml deleted file mode 100644 index 8b5d1fa7f9e02..0000000000000 --- a/docs/changelog/105088.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105088 -summary: "ESQL: Speed up reading many nulls" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105089.yaml b/docs/changelog/105089.yaml deleted file mode 100644 index 6f43c58af8a41..0000000000000 --- a/docs/changelog/105089.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105089 -summary: Return results in order -area: Transform -type: bug -issues: - - 104847 diff --git a/docs/changelog/105103.yaml b/docs/changelog/105103.yaml deleted file mode 100644 index 599d2e3666e4b..0000000000000 --- a/docs/changelog/105103.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105103 -summary: Do not record s3 http request time when it is not available -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/105105.yaml b/docs/changelog/105105.yaml deleted file mode 100644 index 848a9637d1388..0000000000000 --- a/docs/changelog/105105.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105105 -summary: Add s3 `HeadObject` request to request stats -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/105131.yaml b/docs/changelog/105131.yaml deleted file mode 100644 index 36993527da583..0000000000000 --- a/docs/changelog/105131.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105131 -summary: "[Connector API] Support filtering by name, index name in list action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105150.yaml b/docs/changelog/105150.yaml deleted file mode 100644 index d9fc3d337f952..0000000000000 --- a/docs/changelog/105150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105150 -summary: Remove `SearchException` usages without a proper status code -area: Search -type: bug -issues: [] diff --git a/docs/changelog/105163.yaml b/docs/changelog/105163.yaml deleted file mode 100644 index f28bf4de14792..0000000000000 --- a/docs/changelog/105163.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105163 -summary: Add stable `ThreadPool` constructor to `LogstashInternalBridge` -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/105164.yaml b/docs/changelog/105164.yaml deleted file mode 100644 index 7affb0911bc6d..0000000000000 --- a/docs/changelog/105164.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105164 -summary: Remove duplicate checkpoint audits -area: Transform -type: bug -issues: - - 105106 diff --git a/docs/changelog/105178.yaml b/docs/changelog/105178.yaml deleted file mode 100644 index e8fc9cfd6898f..0000000000000 --- a/docs/changelog/105178.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105178 -summary: "[Connector API] Support filtering connectors by service type and a 
query" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105180.yaml b/docs/changelog/105180.yaml deleted file mode 100644 index ac7ed20f151b7..0000000000000 --- a/docs/changelog/105180.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105180 -summary: Use new `ignore_dynamic_beyond_limit` in logs and metric data streams -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/105192.yaml b/docs/changelog/105192.yaml deleted file mode 100644 index b15d58ef40fe7..0000000000000 --- a/docs/changelog/105192.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105192 -summary: Allow transforms to use PIT with remote clusters again -area: Transform -type: enhancement -issues: - - 104518 diff --git a/docs/changelog/105196.yaml b/docs/changelog/105196.yaml deleted file mode 100644 index 8fe7b50cfa989..0000000000000 --- a/docs/changelog/105196.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105196 -summary: Adding a custom exception for problems with the graph of pipelines to be - applied to a document -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/105207.yaml b/docs/changelog/105207.yaml deleted file mode 100644 index 00d227248abfb..0000000000000 --- a/docs/changelog/105207.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105207 -summary: Introduce an `AggregatorReducer` to reduce the footprint of aggregations - in the coordinating node -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/105221.yaml b/docs/changelog/105221.yaml deleted file mode 100644 index 2ef64ef110d95..0000000000000 --- a/docs/changelog/105221.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 105221 -summary: "ESQL: Grammar - FROM METADATA no longer requires []" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "ESQL: Grammar - FROM METADATA no longer requires []" - area: REST API - details: "Remove [ ] for METADATA option inside FROM command statements" - impact: "Previously to return metadata fields, one had to use square brackets:\ - \ (eg. 'FROM index [METADATA _index]').\ - \ This is no longer needed: the [ ] are dropped and do not have to be specified,\ - \ thus simplifying the command above to:'FROM index METADATA _index'." 
- notable: false diff --git a/docs/changelog/105223.yaml b/docs/changelog/105223.yaml deleted file mode 100644 index e2a95fcd6ba48..0000000000000 --- a/docs/changelog/105223.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105223 -summary: "x-pack/plugin/apm-data: Add a new field transaction.profiler_stack_trace_ids to traces-apm@mappings.yaml" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/105232.yaml b/docs/changelog/105232.yaml deleted file mode 100644 index a2ad7ad9451e9..0000000000000 --- a/docs/changelog/105232.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105232 -summary: Execute SAML authentication on the generic threadpool -area: Authentication -type: bug -issues: - - 104962 diff --git a/docs/changelog/105249.yaml b/docs/changelog/105249.yaml deleted file mode 100644 index 979253e452008..0000000000000 --- a/docs/changelog/105249.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105249 -summary: "[Connector API] Support updating configuration values only" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105259.yaml b/docs/changelog/105259.yaml deleted file mode 100644 index a360bc8bc1672..0000000000000 --- a/docs/changelog/105259.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105259 -summary: Lower G1 minimum full GC interval -area: Infra/Circuit Breakers -type: enhancement -issues: [] diff --git a/docs/changelog/105265.yaml b/docs/changelog/105265.yaml deleted file mode 100644 index 70231dbfabc52..0000000000000 --- a/docs/changelog/105265.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105265 -summary: Improving the performance of the ingest simulate verbose API -area: "Ingest Node" -type: enhancement -issues: [] diff --git a/docs/changelog/105269.yaml b/docs/changelog/105269.yaml deleted file mode 100644 index acf05b05ecfc4..0000000000000 --- a/docs/changelog/105269.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105269 -summary: Reserve bytes before serializing page -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105272.yaml b/docs/changelog/105272.yaml deleted file mode 100644 index 1032a17fc10f8..0000000000000 --- a/docs/changelog/105272.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105272 -summary: "Stop the periodic health logger when es is stopping" -area: Health -type: bug -issues: [] diff --git a/docs/changelog/105273.yaml b/docs/changelog/105273.yaml deleted file mode 100644 index 83db9eac2a14a..0000000000000 --- a/docs/changelog/105273.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105273 -summary: "x-pack/plugin/core: make automatic rollovers lazy" -area: Data streams -type: enhancement -issues: - - 104083 diff --git a/docs/changelog/105289.yaml b/docs/changelog/105289.yaml deleted file mode 100644 index a51778a93beb8..0000000000000 --- a/docs/changelog/105289.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105289 -summary: "[Connector API] Change required privileges to indices:data/read(write)" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105299.yaml b/docs/changelog/105299.yaml deleted file mode 100644 index b1f9b3ac4a2aa..0000000000000 --- a/docs/changelog/105299.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105299 -summary: Conditionally send the dimensions field as part of the openai requests -area: Machine Learning -type: enhancement -issues: - - 105005 diff --git a/docs/changelog/105325.yaml b/docs/changelog/105325.yaml deleted file mode 100644 index ab3724efca30f..0000000000000 --- a/docs/changelog/105325.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105325 -summary: "ESQL: Fix Analyzer to not interpret escaped * as a 
pattern" -area: ES|QL -type: bug -issues: - - 104955 diff --git a/docs/changelog/105334.yaml b/docs/changelog/105334.yaml deleted file mode 100644 index 498fdf4113b3c..0000000000000 --- a/docs/changelog/105334.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105334 -summary: Upgrade ANTLR4 to 4.13.1 -area: Query Languages -type: upgrade -issues: - - 102953 diff --git a/docs/changelog/105346.yaml b/docs/changelog/105346.yaml deleted file mode 100644 index 7c6eab93f6c10..0000000000000 --- a/docs/changelog/105346.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105346 -summary: Allow GET inference models by user a with read only permission -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105371.yaml b/docs/changelog/105371.yaml deleted file mode 100644 index 500c64b677a10..0000000000000 --- a/docs/changelog/105371.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105371 -summary: "ESQL: Add plan consistency verification after each optimizer" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105373.yaml b/docs/changelog/105373.yaml deleted file mode 100644 index f9d3c718f7ae3..0000000000000 --- a/docs/changelog/105373.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105373 -summary: "Fix parsing of flattened fields within subobjects: false" -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/105391.yaml b/docs/changelog/105391.yaml deleted file mode 100644 index 6b9b39c00a150..0000000000000 --- a/docs/changelog/105391.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105391 -summary: Catch all the potential exceptions in the ingest processor code -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105403.yaml b/docs/changelog/105403.yaml deleted file mode 100644 index f855c0e8ed94f..0000000000000 --- a/docs/changelog/105403.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105403 -summary: "ESQL: make `cidr_match` foldable" -area: ES|QL -type: bug -issues: - - 105376 diff --git a/docs/changelog/105427.yaml b/docs/changelog/105427.yaml deleted file mode 100644 index e73853b9dce92..0000000000000 --- a/docs/changelog/105427.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105427 -summary: Adding `executedPipelines` to the `IngestDocument` copy constructor -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/105428.yaml b/docs/changelog/105428.yaml deleted file mode 100644 index 49a80150b4303..0000000000000 --- a/docs/changelog/105428.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105428 -summary: Limiting the number of nested pipelines that can be executed -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/105429.yaml b/docs/changelog/105429.yaml deleted file mode 100644 index 706375649b7ca..0000000000000 --- a/docs/changelog/105429.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105429 -summary: Changed system auditor to use levels -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105440.yaml b/docs/changelog/105440.yaml deleted file mode 100644 index 8aacac3e641bf..0000000000000 --- a/docs/changelog/105440.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105440 -summary: Avoid false-positive matches on intermediate objects in `ecs@mappings` -area: Data streams -type: bug -issues: - - 102794 diff --git a/docs/changelog/105442.yaml b/docs/changelog/105442.yaml deleted file mode 100644 index b0af1b634d984..0000000000000 --- a/docs/changelog/105442.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105442 -summary: Handling exceptions on watcher reload -area: Watcher -type: bug -issues: - - 69842 diff --git a/docs/changelog/105454.yaml 
b/docs/changelog/105454.yaml new file mode 100644 index 0000000000000..fc814a343c46b --- /dev/null +++ b/docs/changelog/105454.yaml @@ -0,0 +1,5 @@ +pr: 105454 +summary: "ESQL: Sum of constants" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/105458.yaml b/docs/changelog/105458.yaml deleted file mode 100644 index 2bab415884975..0000000000000 --- a/docs/changelog/105458.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105458 -summary: The OpenAI model parameter should be in service settings not task settings. Move the configuration field to service settings -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105468.yaml b/docs/changelog/105468.yaml deleted file mode 100644 index 0de36a71862a4..0000000000000 --- a/docs/changelog/105468.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105468 -summary: Include better output in profiling & `toString` for automaton based queries -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105476.yaml b/docs/changelog/105476.yaml deleted file mode 100644 index 6520df78520e7..0000000000000 --- a/docs/changelog/105476.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105476 -summary: "ESQL: Fix bug in grammar that allowed spaces inside id pattern" -area: ES|QL -type: bug -issues: - - 105441 diff --git a/docs/changelog/105486.yaml b/docs/changelog/105486.yaml deleted file mode 100644 index befdaec2301c6..0000000000000 --- a/docs/changelog/105486.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105486 -summary: Fix use-after-free at event-loop shutdown -area: Network -type: bug -issues: [] diff --git a/docs/changelog/105499.yaml b/docs/changelog/105499.yaml deleted file mode 100644 index bfc297411efa7..0000000000000 --- a/docs/changelog/105499.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105499 -summary: Fix a bug where destination index aliases are not set up for an unattended transform -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/105501.yaml b/docs/changelog/105501.yaml new file mode 100644 index 0000000000000..2e5e375764640 --- /dev/null +++ b/docs/changelog/105501.yaml @@ -0,0 +1,5 @@ +pr: 105501 +summary: Support non-keyword dimensions as routing fields in TSDB +area: TSDB +type: enhancement +issues: [] diff --git a/docs/changelog/105546.yaml b/docs/changelog/105546.yaml deleted file mode 100644 index 0b54e124f2495..0000000000000 --- a/docs/changelog/105546.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105546 -summary: '`GlobalOrdCardinalityAggregator` should use `HyperLogLogPlusPlus` instead - of `HyperLogLogPlusPlusSparse`' -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml deleted file mode 100644 index 1ffa0128c1d0a..0000000000000 --- a/docs/changelog/105578.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 105578 -summary: Upgrade to Lucene 9.10.0 -area: Search -type: enhancement -issues: [] -highlight: - title: New Lucene 9.10 release - body: |- - - https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. - - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search - - https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. 
- - https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. - notable: true diff --git a/docs/changelog/105588.yaml b/docs/changelog/105588.yaml deleted file mode 100644 index e43ff8cd75c60..0000000000000 --- a/docs/changelog/105588.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105588 -summary: '`URLRepository` should not block shutdown' -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/105593.yaml b/docs/changelog/105593.yaml deleted file mode 100644 index 4eef0d9404f42..0000000000000 --- a/docs/changelog/105593.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105593 -summary: "ESQL: push down \"[text_field] is not null\"" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105633.yaml b/docs/changelog/105633.yaml deleted file mode 100644 index b19ec67f4602a..0000000000000 --- a/docs/changelog/105633.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105633 -summary: "[Connector API] Bugfix: support list type in filtering advenced snippet\ - \ value" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/105650.yaml b/docs/changelog/105650.yaml deleted file mode 100644 index f43da5b315f4c..0000000000000 --- a/docs/changelog/105650.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105650 -summary: "ESQL: Fix wrong attribute shadowing in pushdown rules" -area: ES|QL -type: bug -issues: - - 105434 diff --git a/docs/changelog/105691.yaml b/docs/changelog/105691.yaml deleted file mode 100644 index 89797782b06ee..0000000000000 --- a/docs/changelog/105691.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105691 -summary: "ES|QL: Disable optimizations that rely on Expression.nullable()" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/105770.yaml b/docs/changelog/105770.yaml deleted file mode 100644 index ec8ae4f380e2f..0000000000000 --- a/docs/changelog/105770.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105770 -summary: Field-caps field has value lookup use map instead of looping array -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105772.yaml b/docs/changelog/105772.yaml deleted file mode 100644 index 73680aa04e5ab..0000000000000 --- a/docs/changelog/105772.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105772 -summary: "[ILM] Delete step deletes data stream with only one index" -area: ILM+SLM -type: bug -issues: [] diff --git a/docs/changelog/105789.yaml b/docs/changelog/105789.yaml deleted file mode 100644 index 02a6936fa3294..0000000000000 --- a/docs/changelog/105789.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105789 -summary: Make Health API more resilient to multi-version clusters -area: Health -type: bug -issues: - - 90183 diff --git a/docs/changelog/105848.yaml b/docs/changelog/105848.yaml deleted file mode 100644 index 18291066177f6..0000000000000 --- a/docs/changelog/105848.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105848 -summary: '`ProjectOperator` should not retain references to released blocks' -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/105860.yaml b/docs/changelog/105860.yaml new file mode 100644 index 0000000000000..71f3544a02a1f --- /dev/null +++ b/docs/changelog/105860.yaml @@ -0,0 +1,5 @@ +pr: 105860 +summary: "ESQL: Re-enable logical dependency check" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/105941.yaml b/docs/changelog/105941.yaml deleted file mode 100644 index 8e2eea1657208..0000000000000 --- 
a/docs/changelog/105941.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105941 -summary: Field caps performance pt2 -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105945.yaml b/docs/changelog/105945.yaml deleted file mode 100644 index ec76faf6ef76f..0000000000000 --- a/docs/changelog/105945.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105945 -summary: "[Connector API] Fix default ordering in `SyncJob` list endpoint" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/105987.yaml b/docs/changelog/105987.yaml deleted file mode 100644 index d09a6907c72bf..0000000000000 --- a/docs/changelog/105987.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105987 -summary: Fix `categorize_text` aggregation nested under empty buckets -area: Machine Learning -type: bug -issues: - - 105836 diff --git a/docs/changelog/105994.yaml b/docs/changelog/105994.yaml deleted file mode 100644 index ef9889d0a47af..0000000000000 --- a/docs/changelog/105994.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105994 -summary: Fix bug when nested knn pre-filter might match nested docs -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/106020.yaml b/docs/changelog/106020.yaml deleted file mode 100644 index 094a43b430f89..0000000000000 --- a/docs/changelog/106020.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106020 -summary: Fix resetting a job if the original reset task no longer exists. -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106057.yaml b/docs/changelog/106057.yaml deleted file mode 100644 index c07f658fbbf8a..0000000000000 --- a/docs/changelog/106057.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106057 -summary: Avoid computing `currentInferenceProcessors` on every cluster state -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106060.yaml b/docs/changelog/106060.yaml deleted file mode 100644 index 2b6a47372ddd3..0000000000000 --- a/docs/changelog/106060.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106060 -summary: "[Connector API] Fix serialisation of script params in connector index service" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/106062.yaml b/docs/changelog/106062.yaml deleted file mode 100644 index f4ff3df4045e6..0000000000000 --- a/docs/changelog/106062.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106062 -summary: "During ML maintenance, reset jobs in the reset state without a corresponding\ - \ task" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106065.yaml b/docs/changelog/106065.yaml new file mode 100644 index 0000000000000..b87f4848fb574 --- /dev/null +++ b/docs/changelog/106065.yaml @@ -0,0 +1,6 @@ +pr: 106065 +summary: "ESQL: Values aggregation function" +area: ES|QL +type: feature +issues: + - 103600 diff --git a/docs/changelog/106105.yaml b/docs/changelog/106105.yaml deleted file mode 100644 index 09f80e9e71e6d..0000000000000 --- a/docs/changelog/106105.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106105 -summary: Respect --pass option in certutil csr mode -area: TLS -type: bug -issues: [] diff --git a/docs/changelog/106156.yaml b/docs/changelog/106156.yaml deleted file mode 100644 index 63232efe6e5fb..0000000000000 --- a/docs/changelog/106156.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106156 -summary: Disable parallel collection for terms aggregation with `min_doc_count` equals - to 0 -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/106243.yaml b/docs/changelog/106243.yaml new file mode 100644 index 0000000000000..6b02e3f1699d4 --- /dev/null +++ 
b/docs/changelog/106243.yaml @@ -0,0 +1,5 @@ +pr: 106243 +summary: "[Transform] Auto retry Transform start" +area: "Transform" +type: bug +issues: [] diff --git a/docs/changelog/106288.yaml b/docs/changelog/106288.yaml deleted file mode 100644 index 0f14e53c237a1..0000000000000 --- a/docs/changelog/106288.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106288 -summary: Small time series agg improvement -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/106306.yaml b/docs/changelog/106306.yaml new file mode 100644 index 0000000000000..571fe73c31a3e --- /dev/null +++ b/docs/changelog/106306.yaml @@ -0,0 +1,6 @@ +pr: 99961 +summary: "added fix for inconsistent text trimming in Unified Highlighter" +area: Highlighting +type: bug +issues: + - 101803 diff --git a/docs/changelog/106329.yaml b/docs/changelog/106329.yaml deleted file mode 100644 index 78e811e7987b6..0000000000000 --- a/docs/changelog/106329.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106329 -summary: Fix Search Applications bug where deleting an alias before deleting an application intermittently caused errors -area: Application -type: bug -issues: [] diff --git a/docs/changelog/106338.yaml b/docs/changelog/106338.yaml new file mode 100644 index 0000000000000..c05826d87a11f --- /dev/null +++ b/docs/changelog/106338.yaml @@ -0,0 +1,6 @@ +pr: 106338 +summary: Text fields are stored by default in TSDB indices +area: TSDB +type: enhancement +issues: + - 97039 diff --git a/docs/changelog/106351.yaml b/docs/changelog/106351.yaml deleted file mode 100644 index 45868acc3a284..0000000000000 --- a/docs/changelog/106351.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106351 -summary: "Fix error on sorting unsortable `geo_point` and `cartesian_point`" -area: ES|QL -type: bug -issues: - - 106007 diff --git a/docs/changelog/106381.yaml b/docs/changelog/106381.yaml new file mode 100644 index 0000000000000..500f6d5416822 --- /dev/null +++ b/docs/changelog/106381.yaml @@ -0,0 +1,5 @@ +pr: 106381 +summary: Dedupe terms in terms queries +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/106392.yaml b/docs/changelog/106392.yaml deleted file mode 100644 index ff1a0284ee5db..0000000000000 --- a/docs/changelog/106392.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106392 -summary: Resume driver when failing to fetch pages -area: ES|QL -type: bug -issues: - - 106262 diff --git a/docs/changelog/106396.yaml b/docs/changelog/106396.yaml new file mode 100644 index 0000000000000..7aa06566c75e7 --- /dev/null +++ b/docs/changelog/106396.yaml @@ -0,0 +1,6 @@ +pr: 106396 +summary: "Check preTags and postTags params for empty values" +area: Highlighting +type: bug +issues: + - 69009 diff --git a/docs/changelog/106413.yaml b/docs/changelog/106413.yaml new file mode 100644 index 0000000000000..8e13a839bc41e --- /dev/null +++ b/docs/changelog/106413.yaml @@ -0,0 +1,6 @@ +pr: 106413 +summary: Consolidate permissions checks +area: Transform +type: bug +issues: + - 105794 diff --git a/docs/changelog/106429.yaml b/docs/changelog/106429.yaml new file mode 100644 index 0000000000000..7ac524d13909b --- /dev/null +++ b/docs/changelog/106429.yaml @@ -0,0 +1,5 @@ +pr: 106429 +summary: "ESQL: Regex improvements" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/106435.yaml b/docs/changelog/106435.yaml new file mode 100644 index 0000000000000..5bfe0087a93d3 --- /dev/null +++ b/docs/changelog/106435.yaml @@ -0,0 +1,6 @@ +pr: 106435 +summary: "ENRICH support for TEXT fields" +area: ES|QL +type: enhancement +issues: + - 105384 diff --git 
a/docs/changelog/106472.yaml b/docs/changelog/106472.yaml new file mode 100644 index 0000000000000..120286c4cd8c7 --- /dev/null +++ b/docs/changelog/106472.yaml @@ -0,0 +1,6 @@ +pr: 106472 +summary: "Fix the position of spike, dip and distribution changes bucket when the\ + \ sibling aggregation includes empty buckets" +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/106511.yaml b/docs/changelog/106511.yaml new file mode 100644 index 0000000000000..bdef7f1aea225 --- /dev/null +++ b/docs/changelog/106511.yaml @@ -0,0 +1,5 @@ +pr: 106511 +summary: Wait indefinitely for http connections on shutdown by default +area: Infra/Node Lifecycle +type: bug +issues: [] diff --git a/docs/changelog/106526.yaml b/docs/changelog/106526.yaml new file mode 100644 index 0000000000000..ac98454b5d8b4 --- /dev/null +++ b/docs/changelog/106526.yaml @@ -0,0 +1,5 @@ +pr: 106526 +summary: Enhance search tier GC options +area: Infra/CLI +type: enhancement +issues: [] diff --git a/docs/changelog/106563.yaml b/docs/changelog/106563.yaml new file mode 100644 index 0000000000000..79476f909a04c --- /dev/null +++ b/docs/changelog/106563.yaml @@ -0,0 +1,5 @@ +pr: 106563 +summary: Improve short-circuiting downsample execution +area: TSDB +type: enhancement +issues: [] diff --git a/docs/changelog/106564.yaml b/docs/changelog/106564.yaml new file mode 100644 index 0000000000000..a4e986c4b7d18 --- /dev/null +++ b/docs/changelog/106564.yaml @@ -0,0 +1,5 @@ +pr: 106564 +summary: Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set +area: Search +type: bug +issues: [] diff --git a/docs/changelog/106575.yaml b/docs/changelog/106575.yaml new file mode 100644 index 0000000000000..fb5230a9edb3d --- /dev/null +++ b/docs/changelog/106575.yaml @@ -0,0 +1,5 @@ +pr: 106575 +summary: Unable to retrieve multiple stored field values +area: "Search" +type: bug +issues: [] diff --git a/docs/changelog/106602.yaml b/docs/changelog/106602.yaml new file mode 100644 index 0000000000000..972d7b5d163d3 --- /dev/null +++ b/docs/changelog/106602.yaml @@ -0,0 +1,5 @@ +pr: 106602 +summary: Raise loglevel of events related to transform lifecycle from DEBUG to INFO +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/106654.yaml b/docs/changelog/106654.yaml new file mode 100644 index 0000000000000..3443b68482443 --- /dev/null +++ b/docs/changelog/106654.yaml @@ -0,0 +1,6 @@ +pr: 106654 +summary: "ES|QL: Fix usage of IN operator with TEXT fields" +area: ES|QL +type: bug +issues: + - 105379 diff --git a/docs/changelog/106655.yaml b/docs/changelog/106655.yaml new file mode 100644 index 0000000000000..98078595d5f0c --- /dev/null +++ b/docs/changelog/106655.yaml @@ -0,0 +1,5 @@ +pr: 106655 +summary: Fix Array out of bounds exception in the XLM Roberta tokenizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/106678.yaml b/docs/changelog/106678.yaml new file mode 100644 index 0000000000000..20bf12d6d4346 --- /dev/null +++ b/docs/changelog/106678.yaml @@ -0,0 +1,6 @@ +pr: 106678 +summary: Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` +area: Search +type: bug +issues: + - 105911 diff --git a/docs/changelog/106685.yaml b/docs/changelog/106685.yaml new file mode 100644 index 0000000000000..ed4a16ba0666c --- /dev/null +++ b/docs/changelog/106685.yaml @@ -0,0 +1,5 @@ +pr: 106685 +summary: '`SharedBlobCacheService.maybeFetchRegion` should use `computeCacheFileRegionSize`' +area: Snapshot/Restore +type: bug
+issues: [] diff --git a/docs/changelog/106691.yaml b/docs/changelog/106691.yaml new file mode 100644 index 0000000000000..cbae9796e38c7 --- /dev/null +++ b/docs/changelog/106691.yaml @@ -0,0 +1,6 @@ +pr: 106691 +summary: Fix range queries for float/half_float fields when bounds are out of type's + range +area: Search +type: bug +issues: [] diff --git a/docs/changelog/106708.yaml b/docs/changelog/106708.yaml new file mode 100644 index 0000000000000..b8fdd37e5f03f --- /dev/null +++ b/docs/changelog/106708.yaml @@ -0,0 +1,6 @@ +pr: 106708 +summary: Improve error message when rolling over DS alias +area: Data streams +type: bug +issues: + - 106137 diff --git a/docs/changelog/106731.yaml b/docs/changelog/106731.yaml new file mode 100644 index 0000000000000..0d8e16a8f9616 --- /dev/null +++ b/docs/changelog/106731.yaml @@ -0,0 +1,5 @@ +pr: 106731 +summary: Fix field caps and field level security +area: Security +type: bug +issues: [] diff --git a/docs/changelog/106745.yaml b/docs/changelog/106745.yaml new file mode 100644 index 0000000000000..a6cb035bd267a --- /dev/null +++ b/docs/changelog/106745.yaml @@ -0,0 +1,5 @@ +pr: 106745 +summary: Fix `AffixSetting.exists` to include secure settings +area: Infra/Core +type: bug +issues: [] diff --git a/docs/changelog/106767.yaml b/docs/changelog/106767.yaml new file mode 100644 index 0000000000000..8541e1b14f275 --- /dev/null +++ b/docs/changelog/106767.yaml @@ -0,0 +1,5 @@ +pr: 106767 +summary: Handle pass-through subfields with deep nesting +area: Mapping +type: bug +issues: [] diff --git a/docs/changelog/106799.yaml b/docs/changelog/106799.yaml new file mode 100644 index 0000000000000..c75cd5c15e44b --- /dev/null +++ b/docs/changelog/106799.yaml @@ -0,0 +1,5 @@ +pr: 106799 +summary: Add test to exercise reduction of terms aggregation order by key +area: Aggregations +type: bug +issues: [] diff --git a/docs/changelog/106808.yaml b/docs/changelog/106808.yaml new file mode 100644 index 0000000000000..287477fc302fd --- /dev/null +++ b/docs/changelog/106808.yaml @@ -0,0 +1,5 @@ +pr: 106808 +summary: Make OpenAI embeddings parser more flexible +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/106810.yaml b/docs/changelog/106810.yaml new file mode 100644 index 0000000000000..e93e5cf1e5361 --- /dev/null +++ b/docs/changelog/106810.yaml @@ -0,0 +1,5 @@ +pr: 106810 +summary: "ES|QL: Improve support for TEXT fields in functions" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/96235.yaml b/docs/changelog/96235.yaml deleted file mode 100644 index 83d1eaf74916b..0000000000000 --- a/docs/changelog/96235.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96235 -summary: Add `index.mapping.total_fields.ignore_dynamic_beyond_limit` setting to ignore dynamic fields when field limit is reached -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/97072.yaml b/docs/changelog/97072.yaml new file mode 100644 index 0000000000000..686b30952b646 --- /dev/null +++ b/docs/changelog/97072.yaml @@ -0,0 +1,5 @@ +pr: 97072 +summary: Log when update AffixSetting using addAffixMapUpdateConsumer +area: Infra/Logging +type: bug +issues: [] diff --git a/docs/changelog/99142.yaml b/docs/changelog/99142.yaml deleted file mode 100644 index 885946cec909b..0000000000000 --- a/docs/changelog/99142.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99142 -summary: Reuse number field mapper tests in other modules -area: Search -type: enhancement -issues: - - 92947 diff --git a/docs/changelog/99747.yaml b/docs/changelog/99747.yaml deleted file mode 100644 
index e3e6edc585ca6..0000000000000 --- a/docs/changelog/99747.yaml +++ /dev/null @@ -1,19 +0,0 @@ -pr: 99747 -summary: Improve storage efficiency for non-metric fields in TSDB -area: TSDB -type: enhancement -issues: [] -highlight: - title: Improve storage efficiency for non-metric fields in TSDB - body: |- - Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. - While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, - there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). - In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. - This means that for each time series, we are storing long consecutive runs of the same ordinal. - With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), - and runs of cycling values (such as `1 2 1 2 …`). - In our testing, we have seen a reduction in storage size by about 13%. - The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. - The more non-metric fields, the more effective this improvement will be. - notable: true diff --git a/docs/changelog/99961.yaml b/docs/changelog/99961.yaml deleted file mode 100644 index 457f7801ce218..0000000000000 --- a/docs/changelog/99961.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99961 -summary: "ESQL: Correct out-of-range filter pushdowns" -area: ES|QL -type: bug -issues: - - 99960 diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index ea5942ff71cc8..a89956721a481 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,20 +10,70 @@ ### ActionListener -`ActionListener`s are a means off injecting logic into lower layers of the code. They encapsulate a block of code that takes a response -value -- the `onResponse()` method --, and then that block of code (the `ActionListener`) is passed into a function that will eventually -execute the code (call `onResponse()`) when a response value is available. `ActionListener`s are used to pass code down to act on a result, -rather than lower layers returning a result back up to be acted upon by the caller. One of three things can happen to a listener: it can be -executed in the same thread — e.g. `ActionListener.run()` --; it can be passed off to another thread to be executed; or it can be added to -a list someplace, to eventually be executed by some service. `ActionListener`s also define `onFailure()` logic, in case an error is -encountered before a result can be formed. +Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which +doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. +They support several useful control flows: + +- They can be completed immediately on the calling thread. +- They can be completed concurrently on a different thread. +- They can be stored in a data structure and completed later on when the system reaches a particular state. +- Most commonly, they can be passed on to other methods that themselves require a callback. 
+- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run + before or after completion, before passing them on. + +`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is +used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose +parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes +it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is +certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances +themselves, creating new instances out of existing ones and completing them in interesting ways. See for instance: + +- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself +- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere +- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel +- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows + +Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code +without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to +waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our +code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The +entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at +[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), +and transport APIs all start at +[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) +and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via +callbacks. + +`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the +sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function +which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual +style of calling methods that return values directly back to the caller so they can continue executing as normal. 
There's essentially two +ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an +`onResponse()` and an `onFailure()` method. + +CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also +enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, +perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before +proceeding (e.g. +[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) +amongst many others). Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the +programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all +the callbacks ourselves. + +Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that +asynchronous methods must have `void` return type and may not throw any exceptions. This is mostly the case in our code as written today, +and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In +particular, it's not uncommon to permit some methods to throw an exception, using things like +[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) +(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an +`ActionListener` parameter, but still return a value separately for other local synchronous work. This pattern is often used in the transport action layer with the use of the -[ChannelActionListener]([url](https://github.com/elastic/elasticsearch/blob/8.12/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java)) +[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so -a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, -barring caller timeouts. +channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a +call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring +caller timeouts. (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) 
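To make the listener-passing shape concrete, here is a minimal, self-contained sketch. It is not taken from the Elasticsearch codebase: `fetchDocCount`, `Listener`, and the executor wiring are illustrative stand-ins, with `Listener` simply mirroring the two completion paths of the real `ActionListener` (`onResponse()` and `onFailure()`) so the example compiles without any Elasticsearch dependency. It demonstrates the continuation-passing style described above: the asynchronous method returns `void` and hands its result (or failure) to the continuation instead of returning it to the caller.

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Stand-in for org.elasticsearch.action.ActionListener: the real interface has the same
// two completion paths, onResponse(T) for a value and onFailure(Exception) for an error.
interface Listener<T> {
    void onResponse(T result);
    void onFailure(Exception e);
}

public class ListenerSketch {

    // Continuation-passing style: the method returns void and "returns" its result by
    // completing the listener, possibly on a different thread, possibly much later.
    static void fetchDocCount(String index, ExecutorService executor, Listener<Long> listener) {
        executor.execute(() -> {
            try {
                long count = index.length() * 10L; // placeholder for the real asynchronous work
                listener.onResponse(count);        // continue the computation with the result
            } catch (Exception e) {
                listener.onFailure(e);             // or continue it with a failure
            }
        });
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // The caller supplies the continuation instead of waiting for a return value.
        fetchDocCount("my-index", executor, new Listener<>() {
            @Override
            public void onResponse(Long count) {
                System.out.println("doc count = " + count);
                executor.shutdown();
            }

            @Override
            public void onFailure(Exception e) {
                System.err.println("lookup failed: " + e);
                executor.shutdown();
            }
        });
        // main() carries on immediately; the result arrives via the callback above.
    }
}
```

In real code the anonymous class would usually be replaced by one of the listener utilities mentioned above (for example `ActionListener.wrap(...)`, or a `SubscribableListener` that several callers can subscribe to), but the control flow is the same: whoever holds the listener decides when, and on which thread, the computation continues.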
diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index 2c43395a7fba1..6f03ded093479 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -33,6 +33,7 @@ Use the following APIs to manage connectors: * <> * <> * <> +* <<update-connector-api-key-id-api>> * <> * <> * <> @@ -77,6 +78,7 @@ include::list-connectors-api.asciidoc[] include::list-connector-sync-jobs-api.asciidoc[] include::set-connector-sync-job-error-api.asciidoc[] include::set-connector-sync-job-stats-api.asciidoc[] +include::update-connector-api-key-id-api.asciidoc[] include::update-connector-configuration-api.asciidoc[] include::update-connector-error-api.asciidoc[] include::update-connector-filtering-api.asciidoc[] diff --git a/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc new file mode 100644 index 0000000000000..e15f57a3e2b1e --- /dev/null +++ b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc @@ -0,0 +1,97 @@ +[[update-connector-api-key-id-api]] +=== Update connector API key ID API +++++ +Update connector API key id +++++ + +preview::[] + +Updates the `api_key_id` and/or `api_key_secret_id` field(s) of a connector, specifying: + +. The ID of the API key used for authorization +. The ID of the Connector Secret where the API key is stored + +The Connector Secret ID is only required for native connectors. +Connector clients do not use this field. +See the documentation for {enterprise-search-ref}/native-connectors.html#native-connectors-manage-API-keys-programmatically[managing native connector API keys programmatically^] for more details. + +[[update-connector-api-key-id-api-request]] +==== {api-request-title} + +`PUT _connector/<connector_id>/_api_key_id` + +[[update-connector-api-key-id-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. +* The `api_key_id` parameter should reference an existing API key. +* The `api_key_secret_id` parameter should reference an existing Connector Secret containing an encoded API key value. + +[[update-connector-api-key-id-api-path-params]] +==== {api-path-parms-title} + +`<connector_id>`:: +(Required, string) + +[role="child_attributes"] +[[update-connector-api-key-id-api-request-body]] +==== {api-request-body-title} + +`api_key_id`:: +(Optional, string) ID of the API key that the connector will use to authorize access to required indices. Each connector can be associated with at most one API key. + +`api_key_secret_id`:: +(Optional, string) ID of the Connector Secret that contains the encoded API key. This should be the same API key as `api_key_id` references. This is only required for native connectors. + +[[update-connector-api-key-id-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `api_key_id` and/or `api_key_secret_id` field(s) successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found.
+ +[[update-connector-api-key-id-api-example]] +==== {api-examples-title} + +The following example updates the `api_key_id` and `api_key_secret_id` field(s) for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_api_key_id +{ + "api_key_id": "my-api-key-id", + "api_key_secret_id": "my-connector-secret-id" +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc index 3e81f0fda2ce7..04c40ebf9fa4e 100644 --- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc @@ -55,32 +55,32 @@ Contains the set of rules that are actively used for sync jobs. The `active` obj The value to be used in conjunction with the rule for matching the contents of the document's field. ** `order` (Required, number) + The order in which the rules are applied. The first rule to match has its policy applied. - ** `created_at` (Optional, datetime) + + ** `created_at` (Required, datetime) + The timestamp when the rule was added. - ** `updated_at` (Optional, datetime) + + ** `updated_at` (Required, datetime) + The timestamp when the rule was last edited. - * `advanced_snippet` (Optional, object) + + * `advanced_snippet` (Required, object) + Used for {enterprise-search-ref}/sync-rules.html#sync-rules-advanced[advanced filtering] at query time, with the following sub-attributes: ** `value` (Required, object) + A JSON object passed directly to the connector for advanced filtering. - ** `created_at` (Optional, datetime) + + ** `created_at` (Required, datetime) + The timestamp when this JSON object was created. - ** `updated_at` (Optional, datetime) + + ** `updated_at` (Required, datetime) + The timestamp when this JSON object was last edited. - * `validation` (Optional, object) + + * `validation` (Required, object) + Provides validation status for the rules, including: ** `state` (Required, string) + Indicates the validation state: "edited", "valid", or "invalid". - ** `errors` (Optional, object) + + ** `errors` (Required, object) + Contains details about any validation errors, with sub-attributes: *** `ids` (Required, string) + The ID(s) of any rules deemed invalid. *** `messages` (Required, string) + Messages explaining what is invalid about the rules. -- `draft` (Optional, object) + +- `draft` (Required, object) + An object identical in structure to the `active` object, but used for drafting and editing filtering rules before they become active. 
diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index f20a3393c191c..1bda7d8959bee 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -130,14 +130,18 @@ The response will look like the following: "name": "my-data-stream-1", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } }, { "name": "my-data-stream-2", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } } ] diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc index 5670faaade3ce..7be2b30b9b83c 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc @@ -74,7 +74,9 @@ The response will look like: "generation_time": "6.84s", <9> "lifecycle": { "enabled": true, - "data_retention": "30d" <10> + "data_retention": "30d", + "effective_retention": "30d", <10> + "retention_determined_by": "data_stream_configuration" } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc index 6f1d81ab6ead2..ecfdc16884082 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc @@ -93,10 +93,12 @@ The result will look like this: { "data_streams": [ { - "name": "my-data-stream",<1> + "name": "my-data-stream", <1> "lifecycle": { - "enabled": true, <2> - "data_retention": "7d" <3> + "enabled": true, <2> + "data_retention": "7d", <3> + "effective_retention": "7d", <4> + "retention_determined_by": "data_stream_configuration" <5> } } ] @@ -104,8 +106,11 @@ The result will look like this: -------------------------------------------------- <1> The name of your data stream. <2> Shows if the data stream lifecycle is enabled for this data stream. -<3> The retention period of the data indexed in this data stream, this means that the data in this data stream will +<3> The desired retention period of the data indexed in this data stream; this means that, if there are no other limitations, +the data for this data stream will be preserved for at least 7 days. +<4> The effective retention; this means that the data in this data stream will be kept at least for 7 days. After that {es} can delete it at its own discretion. +<5> The configuration that determined the effective retention.
If you want to see more information about how the data stream lifecycle is applied on individual backing indices use the <>: @@ -128,7 +133,9 @@ The result will look like this: "time_since_index_creation": "1.6m", <3> "lifecycle": { <4> "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 3125c82120d8d..65eaf472890f4 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -200,10 +200,10 @@ PUT _index_template/dsl-data-stream-template "template": { "settings": { "index.lifecycle.name": "pre-dsl-ilm-policy", - "index.lifecycle.prefer_ilm": false <1> + "index.lifecycle.prefer_ilm": false <1> }, - "lifecycle": { - "data_retention": "7d" <2> + "lifecycle": { <2> + "data_retention": "7d" <3> } } } @@ -215,6 +215,8 @@ PUT _index_template/dsl-data-stream-template precedence over data stream lifecycle. <2> We're configuring the data stream lifecycle so _new_ data streams will be managed by data stream lifecycle. +<3> The desired retention, meaning that this data stream should keep the data for at least 7 days, +if this retention is possible. We've now made sure that new data streams will be managed by data stream lifecycle. @@ -268,7 +270,9 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", <3> @@ -346,7 +350,9 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md new file mode 100644 index 0000000000000..0f0f3b6e3cbb8 --- /dev/null +++ b/docs/reference/esql/functions/README.md @@ -0,0 +1,21 @@ +The files in these subdirectories are generated by ESQL's test suite: +* `description` - description of each function scraped from `@FunctionInfo#description` +* `examples` - examples of each function scraped from `@FunctionInfo#examples` +* `parameters` - description of each function's parameters scraped from `@Param` +* `signature` - railroad diagram of the syntax to invoke each function +* `types` - a table of each combination of support type for each parameter. These are generated from tests. +* `layout` - a fully generated description for each function + +Most functions can use the generated docs generated in the `layout` directory. +If we need something more custom for the function we can make a file in this +directory that can `include::` any parts of the files above. 
+ +To regenerate the files for a function run its tests using gradle: +``` +./gradlew :x-pack:plugin:esql:tests -Dtests.class='*SinTests' +``` + +To regenerate the files for all functions run all of ESQL's tests using gradle: +``` +./gradlew :x-pack:plugin:esql:tests +``` diff --git a/docs/reference/esql/functions/abs.asciidoc b/docs/reference/esql/functions/abs.asciidoc deleted file mode 100644 index 32b49bc287a83..0000000000000 --- a/docs/reference/esql/functions/abs.asciidoc +++ /dev/null @@ -1,41 +0,0 @@ -[discrete] -[[esql-abs]] -=== `ABS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/abs.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the absolute value. - -*Supported types* - -include::types/abs.asciidoc[] - -*Examples* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=docsAbs] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=docsAbs-result] -|=== - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees-result] -|=== \ No newline at end of file diff --git a/docs/reference/esql/functions/acos.asciidoc b/docs/reference/esql/functions/acos.asciidoc deleted file mode 100644 index e4d04bd169c78..0000000000000 --- a/docs/reference/esql/functions/acos.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-acos]] -=== `ACOS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/acos.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an -angle, expressed in radians. - -*Supported types* - -include::types/acos.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=acos] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=acos-result] -|=== diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 91293728fd45c..373b1c140a896 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -18,6 +18,7 @@ The <> function supports these aggregate functions: * <> * <> * <> +* <> // end::agg_list[] include::avg.asciidoc[] @@ -30,3 +31,4 @@ include::min.asciidoc[] include::percentile.asciidoc[] include::st_centroid.asciidoc[] include::sum.asciidoc[] +include::values.asciidoc[] diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc deleted file mode 100644 index a326852e9b016..0000000000000 --- a/docs/reference/esql/functions/asin.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-asin]] -=== `ASIN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/asin.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input -numeric expression as an angle, expressed in radians. 
- -*Supported types* - -include::types/asin.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=asin] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=asin-result] -|=== diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc deleted file mode 100644 index 604fc4d0bbecc..0000000000000 --- a/docs/reference/esql/functions/atan.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-atan]] -=== `ATAN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/atan.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input -numeric expression as an angle, expressed in radians. - -*Supported types* - -include::types/atan.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=atan] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=atan-result] -|=== \ No newline at end of file diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc deleted file mode 100644 index 1920b4b7ac1a0..0000000000000 --- a/docs/reference/esql/functions/atan2.asciidoc +++ /dev/null @@ -1,36 +0,0 @@ -[discrete] -[[esql-atan2]] -=== `ATAN2` - -*Syntax* - -[.text-center] -image::esql/functions/signature/atan2.svg[Embedded,opts=inline] - -*Parameters* - -`y`:: -Numeric expression. If `null`, the function returns `null`. - -`x`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the -origin to the point (x , y) in the Cartesian plane, expressed in radians. 
- -*Supported types* - -include::types/atan2.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=atan2] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=atan2-result] -|=== diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 2d4daa6ad2eca..431efab1c924a 100644 --- a/docs/reference/esql/functions/binary.asciidoc +++ b/docs/reference/esql/functions/binary.asciidoc @@ -23,70 +23,52 @@ include::types/not_equals.asciidoc[] [.text-center] image::esql/functions/signature/less_than.svg[Embedded,opts=inline] -Supported types: - include::types/less_than.asciidoc[] ==== Less than or equal to `<=` [.text-center] image::esql/functions/signature/less_than_or_equal.svg[Embedded,opts=inline] -Supported types: - include::types/less_than_or_equal.asciidoc[] ==== Greater than `>` [.text-center] image::esql/functions/signature/greater_than.svg[Embedded,opts=inline] -Supported types: - include::types/greater_than.asciidoc[] ==== Greater than or equal to `>=` [.text-center] image::esql/functions/signature/greater_than_or_equal.svg[Embedded,opts=inline] -Supported types: - include::types/greater_than_or_equal.asciidoc[] ==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline] -Supported types: - include::types/add.asciidoc[] ==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline] -Supported types: - include::types/sub.asciidoc[] ==== Multiply `*` [.text-center] image::esql/functions/signature/mul.svg[Embedded,opts=inline] -Supported types: - include::types/mul.asciidoc[] ==== Divide `/` [.text-center] image::esql/functions/signature/div.svg[Embedded,opts=inline] -Supported types: - include::types/div.asciidoc[] ==== Modulus `%` [.text-center] image::esql/functions/signature/mod.svg[Embedded,opts=inline] -Supported types: - include::types/mod.asciidoc[] diff --git a/docs/reference/esql/functions/ceil.asciidoc b/docs/reference/esql/functions/ceil.asciidoc deleted file mode 100644 index bc132e6bf47e6..0000000000000 --- a/docs/reference/esql/functions/ceil.asciidoc +++ /dev/null @@ -1,37 +0,0 @@ -[discrete] -[[esql-ceil]] -=== `CEIL` - -*Syntax* - -[.text-center] -image::esql/functions/signature/ceil.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Round a number up to the nearest integer. - -NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer - similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. - -*Supported types* - -include::types/ceil.asciidoc[] - - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=ceil] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=ceil-result] -|=== diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc index 1121a75209151..2d8c0f379c82e 100644 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ b/docs/reference/esql/functions/coalesce.asciidoc @@ -8,24 +8,6 @@ ---- COALESCE(expression1 [, ..., expressionN]) ---- - -*Parameters* - -`expressionX`:: -Expression to evaluate. - -*Description* - -Returns the first of its arguments that is not null. If all arguments are null, -it returns `null`. 
- -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/null.csv-spec[tag=coalesce] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/null.csv-spec[tag=coalesce-result] -|=== +include::parameters/coalesce.asciidoc[] +include::description/coalesce.asciidoc[] +include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc index a5a0251bbd70a..7fa1d973c86b6 100644 --- a/docs/reference/esql/functions/cos.asciidoc +++ b/docs/reference/esql/functions/cos.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Sine_and_cosine[cosine] of `n`. Input expected in radians. -*Supported types* - include::types/cos.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/cosh.asciidoc b/docs/reference/esql/functions/cosh.asciidoc index 5883bc4b9d0c4..252064b60e13f 100644 --- a/docs/reference/esql/functions/cosh.asciidoc +++ b/docs/reference/esql/functions/cosh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine]. -*Supported types* - include::types/cosh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc index fa51e6f906110..0d24da9069250 100644 --- a/docs/reference/esql/functions/date_diff.asciidoc +++ b/docs/reference/esql/functions/date_diff.asciidoc @@ -46,8 +46,6 @@ s|abbreviations | nanosecond | nanoseconds, ns |=== -*Supported types* - include::types/date_diff.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/description/abs.asciidoc b/docs/reference/esql/functions/description/abs.asciidoc new file mode 100644 index 0000000000000..1070c024d53a2 --- /dev/null +++ b/docs/reference/esql/functions/description/abs.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the absolute value. diff --git a/docs/reference/esql/functions/description/acos.asciidoc b/docs/reference/esql/functions/description/acos.asciidoc new file mode 100644 index 0000000000000..d31016b4dd565 --- /dev/null +++ b/docs/reference/esql/functions/description/acos.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. diff --git a/docs/reference/esql/functions/description/asin.asciidoc b/docs/reference/esql/functions/description/asin.asciidoc new file mode 100644 index 0000000000000..e2cd7302243c1 --- /dev/null +++ b/docs/reference/esql/functions/description/asin.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. diff --git a/docs/reference/esql/functions/description/atan.asciidoc b/docs/reference/esql/functions/description/atan.asciidoc new file mode 100644 index 0000000000000..650632bcdfd2e --- /dev/null +++ b/docs/reference/esql/functions/description/atan.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Description* + +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. diff --git a/docs/reference/esql/functions/description/atan2.asciidoc b/docs/reference/esql/functions/description/atan2.asciidoc new file mode 100644 index 0000000000000..5d7bb4cdda63b --- /dev/null +++ b/docs/reference/esql/functions/description/atan2.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/auto_bucket.asciidoc new file mode 100644 index 0000000000000..0c1d9d3ea1ffd --- /dev/null +++ b/docs/reference/esql/functions/description/auto_bucket.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc new file mode 100644 index 0000000000000..5c98a7a2620d0 --- /dev/null +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. diff --git a/docs/reference/esql/functions/description/ceil.asciidoc b/docs/reference/esql/functions/description/ceil.asciidoc new file mode 100644 index 0000000000000..b39a4c81df95c --- /dev/null +++ b/docs/reference/esql/functions/description/ceil.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Round a number up to the nearest integer. + +NOTE: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/description/coalesce.asciidoc b/docs/reference/esql/functions/description/coalesce.asciidoc new file mode 100644 index 0000000000000..e85b4e04eeaaa --- /dev/null +++ b/docs/reference/esql/functions/description/coalesce.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. diff --git a/docs/reference/esql/functions/description/concat.asciidoc b/docs/reference/esql/functions/description/concat.asciidoc new file mode 100644 index 0000000000000..a8c136c6336dd --- /dev/null +++ b/docs/reference/esql/functions/description/concat.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Concatenates two or more strings. 
diff --git a/docs/reference/esql/functions/description/cos.asciidoc b/docs/reference/esql/functions/description/cos.asciidoc new file mode 100644 index 0000000000000..e46d651b34c00 --- /dev/null +++ b/docs/reference/esql/functions/description/cos.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the trigonometric cosine of an angle diff --git a/docs/reference/esql/functions/description/cosh.asciidoc b/docs/reference/esql/functions/description/cosh.asciidoc new file mode 100644 index 0000000000000..deaf780addb93 --- /dev/null +++ b/docs/reference/esql/functions/description/cosh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the hyperbolic cosine of a number diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc new file mode 100644 index 0000000000000..d48c7d492cb68 --- /dev/null +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument diff --git a/docs/reference/esql/functions/description/date_extract.asciidoc b/docs/reference/esql/functions/description/date_extract.asciidoc new file mode 100644 index 0000000000000..e1c68d34d9266 --- /dev/null +++ b/docs/reference/esql/functions/description/date_extract.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Extracts parts of a date, like year, month, day, hour. diff --git a/docs/reference/esql/functions/description/date_format.asciidoc b/docs/reference/esql/functions/description/date_format.asciidoc new file mode 100644 index 0000000000000..ef9873bdeffe6 --- /dev/null +++ b/docs/reference/esql/functions/description/date_format.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a string representation of a date, in the provided format. diff --git a/docs/reference/esql/functions/description/date_parse.asciidoc b/docs/reference/esql/functions/description/date_parse.asciidoc new file mode 100644 index 0000000000000..6360f9f3e55d7 --- /dev/null +++ b/docs/reference/esql/functions/description/date_parse.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Parses a string into a date value diff --git a/docs/reference/esql/functions/description/e.asciidoc b/docs/reference/esql/functions/description/e.asciidoc new file mode 100644 index 0000000000000..787de53c32ef6 --- /dev/null +++ b/docs/reference/esql/functions/description/e.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Euler’s number. 
diff --git a/docs/reference/esql/functions/description/ends_with.asciidoc b/docs/reference/esql/functions/description/ends_with.asciidoc new file mode 100644 index 0000000000000..8695a0467d683 --- /dev/null +++ b/docs/reference/esql/functions/description/ends_with.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a boolean that indicates whether a keyword string ends with another string diff --git a/docs/reference/esql/functions/description/floor.asciidoc b/docs/reference/esql/functions/description/floor.asciidoc new file mode 100644 index 0000000000000..139b8b57dafb9 --- /dev/null +++ b/docs/reference/esql/functions/description/floor.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Round a number down to the nearest integer. diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc new file mode 100644 index 0000000000000..3c7cfd3bfb14c --- /dev/null +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the maximum value from many columns. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc new file mode 100644 index 0000000000000..2aeb1f85aa51a --- /dev/null +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the minimum value from many columns. diff --git a/docs/reference/esql/functions/description/left.asciidoc b/docs/reference/esql/functions/description/left.asciidoc new file mode 100644 index 0000000000000..bdd34d2d21285 --- /dev/null +++ b/docs/reference/esql/functions/description/left.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the substring that extracts 'length' chars from 'string' starting from the left. diff --git a/docs/reference/esql/functions/description/length.asciidoc b/docs/reference/esql/functions/description/length.asciidoc new file mode 100644 index 0000000000000..bf976e3d6e507 --- /dev/null +++ b/docs/reference/esql/functions/description/length.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the character length of a string. diff --git a/docs/reference/esql/functions/description/log.asciidoc b/docs/reference/esql/functions/description/log.asciidoc new file mode 100644 index 0000000000000..9e88e2fa90621 --- /dev/null +++ b/docs/reference/esql/functions/description/log.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the logarithm of a number to a base. 
diff --git a/docs/reference/esql/functions/description/log10.asciidoc b/docs/reference/esql/functions/description/log10.asciidoc new file mode 100644 index 0000000000000..fd5d9ce16ee5b --- /dev/null +++ b/docs/reference/esql/functions/description/log10.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the log base 10. diff --git a/docs/reference/esql/functions/description/ltrim.asciidoc b/docs/reference/esql/functions/description/ltrim.asciidoc new file mode 100644 index 0000000000000..95e3d316131fe --- /dev/null +++ b/docs/reference/esql/functions/description/ltrim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Removes leading whitespaces from a string. diff --git a/docs/reference/esql/functions/description/mv_avg.asciidoc b/docs/reference/esql/functions/description/mv_avg.asciidoc new file mode 100644 index 0000000000000..e6ddd6d43502d --- /dev/null +++ b/docs/reference/esql/functions/description/mv_avg.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a multivalued field into a single valued field containing the average of all of the values. diff --git a/docs/reference/esql/functions/description/mv_concat.asciidoc b/docs/reference/esql/functions/description/mv_concat.asciidoc new file mode 100644 index 0000000000000..8c3b24f858604 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_concat.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued string field to a single valued field by concatenating all values. diff --git a/docs/reference/esql/functions/description/mv_count.asciidoc b/docs/reference/esql/functions/description/mv_count.asciidoc new file mode 100644 index 0000000000000..7f311e6938818 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_count.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued field to a single valued field containing the count of values. diff --git a/docs/reference/esql/functions/description/mv_dedupe.asciidoc b/docs/reference/esql/functions/description/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..0d8c49f1f77be --- /dev/null +++ b/docs/reference/esql/functions/description/mv_dedupe.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Remove duplicate values from a multivalued field. diff --git a/docs/reference/esql/functions/description/mv_first.asciidoc b/docs/reference/esql/functions/description/mv_first.asciidoc new file mode 100644 index 0000000000000..01901f19bf1bf --- /dev/null +++ b/docs/reference/esql/functions/description/mv_first.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued field to a single valued field containing the first value. 
diff --git a/docs/reference/esql/functions/description/mv_last.asciidoc b/docs/reference/esql/functions/description/mv_last.asciidoc new file mode 100644 index 0000000000000..55ad684a80cab --- /dev/null +++ b/docs/reference/esql/functions/description/mv_last.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued field to a single valued field containing the last value. diff --git a/docs/reference/esql/functions/description/mv_max.asciidoc b/docs/reference/esql/functions/description/mv_max.asciidoc new file mode 100644 index 0000000000000..b0a725d439698 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_max.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued field to a single valued field containing the maximum value. diff --git a/docs/reference/esql/functions/description/mv_median.asciidoc b/docs/reference/esql/functions/description/mv_median.asciidoc new file mode 100644 index 0000000000000..2167142d0c266 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_median.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a multivalued field into a single valued field containing the median value. diff --git a/docs/reference/esql/functions/description/mv_min.asciidoc b/docs/reference/esql/functions/description/mv_min.asciidoc new file mode 100644 index 0000000000000..502fce5ce4024 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_min.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Reduce a multivalued field to a single valued field containing the minimum value. diff --git a/docs/reference/esql/functions/description/mv_slice.asciidoc b/docs/reference/esql/functions/description/mv_slice.asciidoc new file mode 100644 index 0000000000000..24d3183b6f40e --- /dev/null +++ b/docs/reference/esql/functions/description/mv_slice.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a subset of the multivalued field using the start and end index values. diff --git a/docs/reference/esql/functions/description/mv_sort.asciidoc b/docs/reference/esql/functions/description/mv_sort.asciidoc new file mode 100644 index 0000000000000..ad319a332ab27 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_sort.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Sorts a multivalued field in lexicographical order. diff --git a/docs/reference/esql/functions/description/mv_sum.asciidoc b/docs/reference/esql/functions/description/mv_sum.asciidoc new file mode 100644 index 0000000000000..e38ee29b68123 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_sum.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a multivalued field into a single valued field containing the sum of all of the values. 
diff --git a/docs/reference/esql/functions/description/mv_zip.asciidoc b/docs/reference/esql/functions/description/mv_zip.asciidoc new file mode 100644 index 0000000000000..898cb4e557640 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_zip.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Combines the values from two multivalued fields with a delimiter that joins them together. diff --git a/docs/reference/esql/functions/description/pi.asciidoc b/docs/reference/esql/functions/description/pi.asciidoc new file mode 100644 index 0000000000000..c2b9b737126e1 --- /dev/null +++ b/docs/reference/esql/functions/description/pi.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The ratio of a circle’s circumference to its diameter. diff --git a/docs/reference/esql/functions/description/pow.asciidoc b/docs/reference/esql/functions/description/pow.asciidoc new file mode 100644 index 0000000000000..fd05421eae005 --- /dev/null +++ b/docs/reference/esql/functions/description/pow.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the value of a base raised to the power of an exponent. diff --git a/docs/reference/esql/functions/description/replace.asciidoc b/docs/reference/esql/functions/description/replace.asciidoc new file mode 100644 index 0000000000000..e621526925870 --- /dev/null +++ b/docs/reference/esql/functions/description/replace.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The function substitutes in the string any match of the regular expression with the replacement string. diff --git a/docs/reference/esql/functions/description/right.asciidoc b/docs/reference/esql/functions/description/right.asciidoc new file mode 100644 index 0000000000000..e67003f8376d8 --- /dev/null +++ b/docs/reference/esql/functions/description/right.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Return the substring that extracts length chars from the string starting from the right. diff --git a/docs/reference/esql/functions/description/round.asciidoc b/docs/reference/esql/functions/description/round.asciidoc new file mode 100644 index 0000000000000..3dfec1b30565d --- /dev/null +++ b/docs/reference/esql/functions/description/round.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds a number to the closest number with the specified number of digits. diff --git a/docs/reference/esql/functions/description/rtrim.asciidoc b/docs/reference/esql/functions/description/rtrim.asciidoc new file mode 100644 index 0000000000000..7458f8dd44f9a --- /dev/null +++ b/docs/reference/esql/functions/description/rtrim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Removes trailing whitespaces from a string. 
diff --git a/docs/reference/esql/functions/description/sin.asciidoc b/docs/reference/esql/functions/description/sin.asciidoc new file mode 100644 index 0000000000000..4a5f04732fccc --- /dev/null +++ b/docs/reference/esql/functions/description/sin.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the trigonometric sine of an angle diff --git a/docs/reference/esql/functions/description/sinh.asciidoc b/docs/reference/esql/functions/description/sinh.asciidoc new file mode 100644 index 0000000000000..a51b88c7d446e --- /dev/null +++ b/docs/reference/esql/functions/description/sinh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the hyperbolic sine of a number diff --git a/docs/reference/esql/functions/description/split.asciidoc b/docs/reference/esql/functions/description/split.asciidoc new file mode 100644 index 0000000000000..b1b32c1d4de5e --- /dev/null +++ b/docs/reference/esql/functions/description/split.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Split a single valued string into multiple strings. diff --git a/docs/reference/esql/functions/description/sqrt.asciidoc b/docs/reference/esql/functions/description/sqrt.asciidoc new file mode 100644 index 0000000000000..0963e0041280b --- /dev/null +++ b/docs/reference/esql/functions/description/sqrt.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the square root of a number. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc new file mode 100644 index 0000000000000..b736ba29a6c8b --- /dev/null +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the two geometries or geometry columns intersect. diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc new file mode 100644 index 0000000000000..beb077bea332c --- /dev/null +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Extracts the x-coordinate from a point geometry. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc new file mode 100644 index 0000000000000..19c371d2ef931 --- /dev/null +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Extracts the y-coordinate from a point geometry. 
diff --git a/docs/reference/esql/functions/description/starts_with.asciidoc b/docs/reference/esql/functions/description/starts_with.asciidoc new file mode 100644 index 0000000000000..f21cd724be6ef --- /dev/null +++ b/docs/reference/esql/functions/description/starts_with.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a boolean that indicates whether a keyword string starts with another string diff --git a/docs/reference/esql/functions/description/substring.asciidoc b/docs/reference/esql/functions/description/substring.asciidoc new file mode 100644 index 0000000000000..edb97b219bbe0 --- /dev/null +++ b/docs/reference/esql/functions/description/substring.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a substring of a string, specified by a start position and an optional length diff --git a/docs/reference/esql/functions/description/tan.asciidoc b/docs/reference/esql/functions/description/tan.asciidoc new file mode 100644 index 0000000000000..1f6a4f96f59f1 --- /dev/null +++ b/docs/reference/esql/functions/description/tan.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the trigonometric tangent of an angle diff --git a/docs/reference/esql/functions/description/tanh.asciidoc b/docs/reference/esql/functions/description/tanh.asciidoc new file mode 100644 index 0000000000000..277783f7f70fe --- /dev/null +++ b/docs/reference/esql/functions/description/tanh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the hyperbolic tangent of a number diff --git a/docs/reference/esql/functions/description/tau.asciidoc b/docs/reference/esql/functions/description/tau.asciidoc new file mode 100644 index 0000000000000..bb720c2fa737c --- /dev/null +++ b/docs/reference/esql/functions/description/tau.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The ratio of a circle’s circumference to its radius. diff --git a/docs/reference/esql/functions/description/to_boolean.asciidoc b/docs/reference/esql/functions/description/to_boolean.asciidoc new file mode 100644 index 0000000000000..88c9d1707b6b9 --- /dev/null +++ b/docs/reference/esql/functions/description/to_boolean.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a boolean value. diff --git a/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..26f69a3ba8b9c --- /dev/null +++ b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a point value. 
diff --git a/docs/reference/esql/functions/description/to_cartesianshape.asciidoc b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..82d4fcb944093 --- /dev/null +++ b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a shape value. diff --git a/docs/reference/esql/functions/description/to_datetime.asciidoc b/docs/reference/esql/functions/description/to_datetime.asciidoc new file mode 100644 index 0000000000000..0cdca76c6462a --- /dev/null +++ b/docs/reference/esql/functions/description/to_datetime.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a date value. diff --git a/docs/reference/esql/functions/description/to_degrees.asciidoc b/docs/reference/esql/functions/description/to_degrees.asciidoc new file mode 100644 index 0000000000000..6e59a3e8d1893 --- /dev/null +++ b/docs/reference/esql/functions/description/to_degrees.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a number in radians to degrees. diff --git a/docs/reference/esql/functions/description/to_double.asciidoc b/docs/reference/esql/functions/description/to_double.asciidoc new file mode 100644 index 0000000000000..b02142d80c61e --- /dev/null +++ b/docs/reference/esql/functions/description/to_double.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a double value. diff --git a/docs/reference/esql/functions/description/to_geopoint.asciidoc b/docs/reference/esql/functions/description/to_geopoint.asciidoc new file mode 100644 index 0000000000000..de59c1b65bb89 --- /dev/null +++ b/docs/reference/esql/functions/description/to_geopoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a geo_point value. diff --git a/docs/reference/esql/functions/description/to_geoshape.asciidoc b/docs/reference/esql/functions/description/to_geoshape.asciidoc new file mode 100644 index 0000000000000..17b3959f681c2 --- /dev/null +++ b/docs/reference/esql/functions/description/to_geoshape.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a geo_shape value. diff --git a/docs/reference/esql/functions/description/to_integer.asciidoc b/docs/reference/esql/functions/description/to_integer.asciidoc new file mode 100644 index 0000000000000..8a5bfd2a57927 --- /dev/null +++ b/docs/reference/esql/functions/description/to_integer.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to an integer value. 
diff --git a/docs/reference/esql/functions/description/to_ip.asciidoc b/docs/reference/esql/functions/description/to_ip.asciidoc new file mode 100644 index 0000000000000..f15cb84bd2c4d --- /dev/null +++ b/docs/reference/esql/functions/description/to_ip.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input string to an IP value. diff --git a/docs/reference/esql/functions/description/to_long.asciidoc b/docs/reference/esql/functions/description/to_long.asciidoc new file mode 100644 index 0000000000000..5ec67005fad35 --- /dev/null +++ b/docs/reference/esql/functions/description/to_long.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to a long value. diff --git a/docs/reference/esql/functions/description/to_lower.asciidoc b/docs/reference/esql/functions/description/to_lower.asciidoc new file mode 100644 index 0000000000000..93f42d4201bc8 --- /dev/null +++ b/docs/reference/esql/functions/description/to_lower.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a new string representing the input string converted to lower case. diff --git a/docs/reference/esql/functions/description/to_radians.asciidoc b/docs/reference/esql/functions/description/to_radians.asciidoc new file mode 100644 index 0000000000000..961a418a751e2 --- /dev/null +++ b/docs/reference/esql/functions/description/to_radians.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a number in degrees to radians. diff --git a/docs/reference/esql/functions/description/to_string.asciidoc b/docs/reference/esql/functions/description/to_string.asciidoc new file mode 100644 index 0000000000000..31557adf86013 --- /dev/null +++ b/docs/reference/esql/functions/description/to_string.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts a field into a string. diff --git a/docs/reference/esql/functions/description/to_unsigned_long.asciidoc b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..f3087d5e576d1 --- /dev/null +++ b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input value to an unsigned long value. diff --git a/docs/reference/esql/functions/description/to_upper.asciidoc b/docs/reference/esql/functions/description/to_upper.asciidoc new file mode 100644 index 0000000000000..067f645ede6f6 --- /dev/null +++ b/docs/reference/esql/functions/description/to_upper.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a new string representing the input string converted to upper case. 
diff --git a/docs/reference/esql/functions/description/to_version.asciidoc b/docs/reference/esql/functions/description/to_version.asciidoc new file mode 100644 index 0000000000000..78bacc4073e13 --- /dev/null +++ b/docs/reference/esql/functions/description/to_version.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Converts an input string to a version value. diff --git a/docs/reference/esql/functions/description/trim.asciidoc b/docs/reference/esql/functions/description/trim.asciidoc new file mode 100644 index 0000000000000..888189746bf20 --- /dev/null +++ b/docs/reference/esql/functions/description/trim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Removes leading and trailing whitespaces from a string. diff --git a/docs/reference/esql/functions/ends_with.asciidoc b/docs/reference/esql/functions/ends_with.asciidoc index 49477996ada19..23ad8df65b8fd 100644 --- a/docs/reference/esql/functions/ends_with.asciidoc +++ b/docs/reference/esql/functions/ends_with.asciidoc @@ -20,8 +20,6 @@ String expression. If `null`, the function returns `null`. Returns a boolean that indicates whether a keyword string ends with another string. -*Supported types* - include::types/ends_with.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/examples/abs.asciidoc b/docs/reference/esql/functions/examples/abs.asciidoc new file mode 100644 index 0000000000000..744ac944719b9 --- /dev/null +++ b/docs/reference/esql/functions/examples/abs.asciidoc @@ -0,0 +1,21 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=abs] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=abs-result] +|=== +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=abs-employees] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=abs-employees-result] +|=== + diff --git a/docs/reference/esql/functions/examples/acos.asciidoc b/docs/reference/esql/functions/examples/acos.asciidoc new file mode 100644 index 0000000000000..0b3fe6219ab43 --- /dev/null +++ b/docs/reference/esql/functions/examples/acos.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=acos] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=acos-result] +|=== + diff --git a/docs/reference/esql/functions/examples/asin.asciidoc b/docs/reference/esql/functions/examples/asin.asciidoc new file mode 100644 index 0000000000000..16a717172e45d --- /dev/null +++ b/docs/reference/esql/functions/examples/asin.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=asin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=asin-result] +|=== + diff --git a/docs/reference/esql/functions/examples/atan.asciidoc b/docs/reference/esql/functions/examples/atan.asciidoc new file mode 100644 index 0000000000000..e98a8195a71b8 --- /dev/null +++ b/docs/reference/esql/functions/examples/atan.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=atan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=atan-result] +|=== + diff --git a/docs/reference/esql/functions/examples/atan2.asciidoc b/docs/reference/esql/functions/examples/atan2.asciidoc new file mode 100644 index 0000000000000..de803ac3e5859 --- /dev/null +++ b/docs/reference/esql/functions/examples/atan2.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=atan2] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=atan2-result] +|=== + diff --git a/docs/reference/esql/functions/examples/ceil.asciidoc b/docs/reference/esql/functions/examples/ceil.asciidoc new file mode 100644 index 0000000000000..7404eecbbfe0f --- /dev/null +++ b/docs/reference/esql/functions/examples/ceil.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=ceil] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=ceil-result] +|=== + diff --git a/docs/reference/esql/functions/examples/coalesce.asciidoc b/docs/reference/esql/functions/examples/coalesce.asciidoc new file mode 100644 index 0000000000000..7ec2c747837aa --- /dev/null +++ b/docs/reference/esql/functions/examples/coalesce.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/null.csv-spec[tag=coalesce] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/null.csv-spec[tag=coalesce-result] +|=== + diff --git a/docs/reference/esql/functions/examples/left.asciidoc b/docs/reference/esql/functions/examples/left.asciidoc new file mode 100644 index 0000000000000..1189a5c28047d --- /dev/null +++ b/docs/reference/esql/functions/examples/left.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=left] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=left-result] +|=== + diff --git a/docs/reference/esql/functions/floor.asciidoc b/docs/reference/esql/functions/floor.asciidoc index 0730a87e595fd..69d8657d008b2 100644 --- a/docs/reference/esql/functions/floor.asciidoc +++ b/docs/reference/esql/functions/floor.asciidoc @@ -17,11 +17,9 @@ Numeric expression. If `null`, the function returns `null`. Rounds a number down to the nearest integer. NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer + For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. -*Supported types* - include::types/floor.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc index b9fc114d39ec6..003f1f46e6db5 100644 --- a/docs/reference/esql/functions/greatest.asciidoc +++ b/docs/reference/esql/functions/greatest.asciidoc @@ -24,8 +24,6 @@ NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. -*Supported types* - include::types/greatest.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/layout/abs.asciidoc b/docs/reference/esql/functions/layout/abs.asciidoc new file mode 100644 index 0000000000000..cc3c1d5634c7e --- /dev/null +++ b/docs/reference/esql/functions/layout/abs.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-abs]] +=== `ABS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/abs.svg[Embedded,opts=inline] + +include::../parameters/abs.asciidoc[] +include::../description/abs.asciidoc[] +include::../types/abs.asciidoc[] +include::../examples/abs.asciidoc[] diff --git a/docs/reference/esql/functions/layout/acos.asciidoc b/docs/reference/esql/functions/layout/acos.asciidoc new file mode 100644 index 0000000000000..8bc834460f01a --- /dev/null +++ b/docs/reference/esql/functions/layout/acos.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-acos]] +=== `ACOS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/acos.svg[Embedded,opts=inline] + +include::../parameters/acos.asciidoc[] +include::../description/acos.asciidoc[] +include::../types/acos.asciidoc[] +include::../examples/acos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/asin.asciidoc b/docs/reference/esql/functions/layout/asin.asciidoc new file mode 100644 index 0000000000000..a092f3c13d72c --- /dev/null +++ b/docs/reference/esql/functions/layout/asin.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-asin]] +=== `ASIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/asin.svg[Embedded,opts=inline] + +include::../parameters/asin.asciidoc[] +include::../description/asin.asciidoc[] +include::../types/asin.asciidoc[] +include::../examples/asin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan.asciidoc b/docs/reference/esql/functions/layout/atan.asciidoc new file mode 100644 index 0000000000000..c92523ef39fae --- /dev/null +++ b/docs/reference/esql/functions/layout/atan.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-atan]] +=== `ATAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/atan.svg[Embedded,opts=inline] + +include::../parameters/atan.asciidoc[] +include::../description/atan.asciidoc[] +include::../types/atan.asciidoc[] +include::../examples/atan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan2.asciidoc b/docs/reference/esql/functions/layout/atan2.asciidoc new file mode 100644 index 0000000000000..b23aa95aa56b8 --- /dev/null +++ b/docs/reference/esql/functions/layout/atan2.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-atan2]] +=== `ATAN2` + +*Syntax* + +[.text-center] +image::esql/functions/signature/atan2.svg[Embedded,opts=inline] + +include::../parameters/atan2.asciidoc[] +include::../description/atan2.asciidoc[] +include::../types/atan2.asciidoc[] +include::../examples/atan2.asciidoc[] diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc new file mode 100644 index 0000000000000..82e05ab5d215c --- /dev/null +++ b/docs/reference/esql/functions/layout/auto_bucket.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-auto_bucket]] +=== `AUTO_BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline] + +include::../parameters/auto_bucket.asciidoc[] +include::../description/auto_bucket.asciidoc[] +include::../types/auto_bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc new file mode 100644 index 0000000000000..192e74522b8d3 --- /dev/null +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-case]] +=== `CASE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/case.svg[Embedded,opts=inline] + +include::../parameters/case.asciidoc[] +include::../description/case.asciidoc[] +include::../types/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ceil.asciidoc b/docs/reference/esql/functions/layout/ceil.asciidoc new file mode 100644 index 0000000000000..2e49dc848ae59 --- /dev/null +++ b/docs/reference/esql/functions/layout/ceil.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-ceil]] +=== `CEIL` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ceil.svg[Embedded,opts=inline] + +include::../parameters/ceil.asciidoc[] +include::../description/ceil.asciidoc[] +include::../types/ceil.asciidoc[] +include::../examples/ceil.asciidoc[] diff --git a/docs/reference/esql/functions/layout/coalesce.asciidoc b/docs/reference/esql/functions/layout/coalesce.asciidoc new file mode 100644 index 0000000000000..47cae18197402 --- /dev/null +++ b/docs/reference/esql/functions/layout/coalesce.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-coalesce]] +=== `COALESCE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/coalesce.svg[Embedded,opts=inline] + +include::../parameters/coalesce.asciidoc[] +include::../description/coalesce.asciidoc[] +include::../types/coalesce.asciidoc[] +include::../examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/layout/concat.asciidoc b/docs/reference/esql/functions/layout/concat.asciidoc new file mode 100644 index 0000000000000..fe3b544a8a9c3 --- /dev/null +++ b/docs/reference/esql/functions/layout/concat.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-concat]] +=== `CONCAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/concat.svg[Embedded,opts=inline] + +include::../parameters/concat.asciidoc[] +include::../description/concat.asciidoc[] +include::../types/concat.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cos.asciidoc b/docs/reference/esql/functions/layout/cos.asciidoc new file mode 100644 index 0000000000000..7b97f40529096 --- /dev/null +++ b/docs/reference/esql/functions/layout/cos.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-cos]] +=== `COS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/cos.svg[Embedded,opts=inline] + +include::../parameters/cos.asciidoc[] +include::../description/cos.asciidoc[] +include::../types/cos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cosh.asciidoc b/docs/reference/esql/functions/layout/cosh.asciidoc new file mode 100644 index 0000000000000..e36a96e0eb324 --- /dev/null +++ b/docs/reference/esql/functions/layout/cosh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-cosh]] +=== `COSH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/cosh.svg[Embedded,opts=inline] + +include::../parameters/cosh.asciidoc[] +include::../description/cosh.asciidoc[] +include::../types/cosh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_diff.asciidoc b/docs/reference/esql/functions/layout/date_diff.asciidoc new file mode 100644 index 0000000000000..37ef5ea874853 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_diff.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-date_diff]] +=== `DATE_DIFF` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_diff.svg[Embedded,opts=inline] + +include::../parameters/date_diff.asciidoc[] +include::../description/date_diff.asciidoc[] +include::../types/date_diff.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_extract.asciidoc b/docs/reference/esql/functions/layout/date_extract.asciidoc new file mode 100644 index 0000000000000..90c5fa68c9e1d --- /dev/null +++ b/docs/reference/esql/functions/layout/date_extract.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_extract]] +=== `DATE_EXTRACT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_extract.svg[Embedded,opts=inline] + +include::../parameters/date_extract.asciidoc[] +include::../description/date_extract.asciidoc[] +include::../types/date_extract.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_format.asciidoc b/docs/reference/esql/functions/layout/date_format.asciidoc new file mode 100644 index 0000000000000..1f9199afc812c --- /dev/null +++ b/docs/reference/esql/functions/layout/date_format.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_format]] +=== `DATE_FORMAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_format.svg[Embedded,opts=inline] + +include::../parameters/date_format.asciidoc[] +include::../description/date_format.asciidoc[] +include::../types/date_format.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_parse.asciidoc b/docs/reference/esql/functions/layout/date_parse.asciidoc new file mode 100644 index 0000000000000..172208196a329 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_parse.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_parse]] +=== `DATE_PARSE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_parse.svg[Embedded,opts=inline] + +include::../parameters/date_parse.asciidoc[] +include::../description/date_parse.asciidoc[] +include::../types/date_parse.asciidoc[] diff --git a/docs/reference/esql/functions/layout/e.asciidoc b/docs/reference/esql/functions/layout/e.asciidoc new file mode 100644 index 0000000000000..a0e1ca3830e30 --- /dev/null +++ b/docs/reference/esql/functions/layout/e.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-e]] +=== `E` + +*Syntax* + +[.text-center] +image::esql/functions/signature/e.svg[Embedded,opts=inline] + +include::../parameters/e.asciidoc[] +include::../description/e.asciidoc[] +include::../types/e.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ends_with.asciidoc b/docs/reference/esql/functions/layout/ends_with.asciidoc new file mode 100644 index 0000000000000..b2ff1268a951d --- /dev/null +++ b/docs/reference/esql/functions/layout/ends_with.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-ends_with]] +=== `ENDS_WITH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ends_with.svg[Embedded,opts=inline] + +include::../parameters/ends_with.asciidoc[] +include::../description/ends_with.asciidoc[] +include::../types/ends_with.asciidoc[] diff --git a/docs/reference/esql/functions/layout/floor.asciidoc b/docs/reference/esql/functions/layout/floor.asciidoc new file mode 100644 index 0000000000000..f3f05260989dc --- /dev/null +++ b/docs/reference/esql/functions/layout/floor.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-floor]] +=== `FLOOR` + +*Syntax* + +[.text-center] +image::esql/functions/signature/floor.svg[Embedded,opts=inline] + +include::../parameters/floor.asciidoc[] +include::../description/floor.asciidoc[] +include::../types/floor.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc new file mode 100644 index 0000000000000..1ff17f3c3adfe --- /dev/null +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-greatest]] +=== `GREATEST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/greatest.svg[Embedded,opts=inline] + +include::../parameters/greatest.asciidoc[] +include::../description/greatest.asciidoc[] +include::../types/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc new file mode 100644 index 0000000000000..a14a166c8bfe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-least]] +=== `LEAST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/least.svg[Embedded,opts=inline] + +include::../parameters/least.asciidoc[] +include::../description/least.asciidoc[] +include::../types/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/left.asciidoc b/docs/reference/esql/functions/layout/left.asciidoc new file mode 100644 index 0000000000000..3a995a2f9a247 --- /dev/null +++ b/docs/reference/esql/functions/layout/left.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-left]] +=== `LEFT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/left.svg[Embedded,opts=inline] + +include::../parameters/left.asciidoc[] +include::../description/left.asciidoc[] +include::../types/left.asciidoc[] +include::../examples/left.asciidoc[] diff --git a/docs/reference/esql/functions/layout/length.asciidoc b/docs/reference/esql/functions/layout/length.asciidoc new file mode 100644 index 0000000000000..03d81bb2f931f --- /dev/null +++ b/docs/reference/esql/functions/layout/length.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-length]] +=== `LENGTH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/length.svg[Embedded,opts=inline] + +include::../parameters/length.asciidoc[] +include::../description/length.asciidoc[] +include::../types/length.asciidoc[] diff --git a/docs/reference/esql/functions/layout/log.asciidoc b/docs/reference/esql/functions/layout/log.asciidoc new file mode 100644 index 0000000000000..d5ce98c524421 --- /dev/null +++ b/docs/reference/esql/functions/layout/log.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-log]] +=== `LOG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/log.svg[Embedded,opts=inline] + +include::../parameters/log.asciidoc[] +include::../description/log.asciidoc[] +include::../types/log.asciidoc[] diff --git a/docs/reference/esql/functions/layout/log10.asciidoc b/docs/reference/esql/functions/layout/log10.asciidoc new file mode 100644 index 0000000000000..3de3008e5b91a --- /dev/null +++ b/docs/reference/esql/functions/layout/log10.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-log10]] +=== `LOG10` + +*Syntax* + +[.text-center] +image::esql/functions/signature/log10.svg[Embedded,opts=inline] + +include::../parameters/log10.asciidoc[] +include::../description/log10.asciidoc[] +include::../types/log10.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ltrim.asciidoc b/docs/reference/esql/functions/layout/ltrim.asciidoc new file mode 100644 index 0000000000000..54025dee54b7a --- /dev/null +++ b/docs/reference/esql/functions/layout/ltrim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-ltrim]] +=== `LTRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ltrim.svg[Embedded,opts=inline] + +include::../parameters/ltrim.asciidoc[] +include::../description/ltrim.asciidoc[] +include::../types/ltrim.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_avg.asciidoc b/docs/reference/esql/functions/layout/mv_avg.asciidoc new file mode 100644 index 0000000000000..dc1913e53c26a --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_avg.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_avg]] +=== `MV_AVG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_avg.svg[Embedded,opts=inline] + +include::../parameters/mv_avg.asciidoc[] +include::../description/mv_avg.asciidoc[] +include::../types/mv_avg.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_concat.asciidoc b/docs/reference/esql/functions/layout/mv_concat.asciidoc new file mode 100644 index 0000000000000..d5d3b98e59f59 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_concat.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-mv_concat]] +=== `MV_CONCAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_concat.svg[Embedded,opts=inline] + +include::../parameters/mv_concat.asciidoc[] +include::../description/mv_concat.asciidoc[] +include::../types/mv_concat.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_count.asciidoc b/docs/reference/esql/functions/layout/mv_count.asciidoc new file mode 100644 index 0000000000000..a8a0286c114d0 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_count.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_count]] +=== `MV_COUNT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_count.svg[Embedded,opts=inline] + +include::../parameters/mv_count.asciidoc[] +include::../description/mv_count.asciidoc[] +include::../types/mv_count.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_dedupe.asciidoc b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..332cdfc32ace5 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_dedupe]] +=== `MV_DEDUPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_dedupe.svg[Embedded,opts=inline] + +include::../parameters/mv_dedupe.asciidoc[] +include::../description/mv_dedupe.asciidoc[] +include::../types/mv_dedupe.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_first.asciidoc b/docs/reference/esql/functions/layout/mv_first.asciidoc new file mode 100644 index 0000000000000..270861cf99e5f --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_first.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_first]] +=== `MV_FIRST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_first.svg[Embedded,opts=inline] + +include::../parameters/mv_first.asciidoc[] +include::../description/mv_first.asciidoc[] +include::../types/mv_first.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_last.asciidoc b/docs/reference/esql/functions/layout/mv_last.asciidoc new file mode 100644 index 0000000000000..f1c183d0723ad --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_last.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_last]] +=== `MV_LAST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_last.svg[Embedded,opts=inline] + +include::../parameters/mv_last.asciidoc[] +include::../description/mv_last.asciidoc[] +include::../types/mv_last.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_max.asciidoc b/docs/reference/esql/functions/layout/mv_max.asciidoc new file mode 100644 index 0000000000000..7c5155b97b7ac --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_max.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-mv_max]] +=== `MV_MAX` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_max.svg[Embedded,opts=inline] + +include::../parameters/mv_max.asciidoc[] +include::../description/mv_max.asciidoc[] +include::../types/mv_max.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_median.asciidoc b/docs/reference/esql/functions/layout/mv_median.asciidoc new file mode 100644 index 0000000000000..70c84319bdbfc --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_median.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_median]] +=== `MV_MEDIAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_median.svg[Embedded,opts=inline] + +include::../parameters/mv_median.asciidoc[] +include::../description/mv_median.asciidoc[] +include::../types/mv_median.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_min.asciidoc b/docs/reference/esql/functions/layout/mv_min.asciidoc new file mode 100644 index 0000000000000..78b74318d0dc1 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_min.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_min]] +=== `MV_MIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_min.svg[Embedded,opts=inline] + +include::../parameters/mv_min.asciidoc[] +include::../description/mv_min.asciidoc[] +include::../types/mv_min.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_slice.asciidoc b/docs/reference/esql/functions/layout/mv_slice.asciidoc new file mode 100644 index 0000000000000..87c5d26e7747b --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_slice.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_slice]] +=== `MV_SLICE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_slice.svg[Embedded,opts=inline] + +include::../parameters/mv_slice.asciidoc[] +include::../description/mv_slice.asciidoc[] +include::../types/mv_slice.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_sort.asciidoc b/docs/reference/esql/functions/layout/mv_sort.asciidoc new file mode 100644 index 0000000000000..1207b915b33c0 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_sort.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_sort]] +=== `MV_SORT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_sort.svg[Embedded,opts=inline] + +include::../parameters/mv_sort.asciidoc[] +include::../description/mv_sort.asciidoc[] +include::../types/mv_sort.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_sum.asciidoc b/docs/reference/esql/functions/layout/mv_sum.asciidoc new file mode 100644 index 0000000000000..963a936ee4111 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_sum.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-mv_sum]] +=== `MV_SUM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_sum.svg[Embedded,opts=inline] + +include::../parameters/mv_sum.asciidoc[] +include::../description/mv_sum.asciidoc[] +include::../types/mv_sum.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_zip.asciidoc b/docs/reference/esql/functions/layout/mv_zip.asciidoc new file mode 100644 index 0000000000000..29d9273423264 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_zip.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_zip]] +=== `MV_ZIP` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_zip.svg[Embedded,opts=inline] + +include::../parameters/mv_zip.asciidoc[] +include::../description/mv_zip.asciidoc[] +include::../types/mv_zip.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pi.asciidoc b/docs/reference/esql/functions/layout/pi.asciidoc new file mode 100644 index 0000000000000..402e7b28481d6 --- /dev/null +++ b/docs/reference/esql/functions/layout/pi.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-pi]] +=== `PI` + +*Syntax* + +[.text-center] +image::esql/functions/signature/pi.svg[Embedded,opts=inline] + +include::../parameters/pi.asciidoc[] +include::../description/pi.asciidoc[] +include::../types/pi.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pow.asciidoc b/docs/reference/esql/functions/layout/pow.asciidoc new file mode 100644 index 0000000000000..019c17b7a03c6 --- /dev/null +++ b/docs/reference/esql/functions/layout/pow.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-pow]] +=== `POW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/pow.svg[Embedded,opts=inline] + +include::../parameters/pow.asciidoc[] +include::../description/pow.asciidoc[] +include::../types/pow.asciidoc[] diff --git a/docs/reference/esql/functions/layout/replace.asciidoc b/docs/reference/esql/functions/layout/replace.asciidoc new file mode 100644 index 0000000000000..9f51c544e2c6a --- /dev/null +++ b/docs/reference/esql/functions/layout/replace.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-replace]] +=== `REPLACE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/replace.svg[Embedded,opts=inline] + +include::../parameters/replace.asciidoc[] +include::../description/replace.asciidoc[] +include::../types/replace.asciidoc[] diff --git a/docs/reference/esql/functions/layout/right.asciidoc b/docs/reference/esql/functions/layout/right.asciidoc new file mode 100644 index 0000000000000..86e059f5ad4f2 --- /dev/null +++ b/docs/reference/esql/functions/layout/right.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-right]] +=== `RIGHT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/right.svg[Embedded,opts=inline] + +include::../parameters/right.asciidoc[] +include::../description/right.asciidoc[] +include::../types/right.asciidoc[] diff --git a/docs/reference/esql/functions/layout/round.asciidoc b/docs/reference/esql/functions/layout/round.asciidoc new file mode 100644 index 0000000000000..8424432052750 --- /dev/null +++ b/docs/reference/esql/functions/layout/round.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-round]] +=== `ROUND` + +*Syntax* + +[.text-center] +image::esql/functions/signature/round.svg[Embedded,opts=inline] + +include::../parameters/round.asciidoc[] +include::../description/round.asciidoc[] +include::../types/round.asciidoc[] diff --git a/docs/reference/esql/functions/layout/rtrim.asciidoc b/docs/reference/esql/functions/layout/rtrim.asciidoc new file mode 100644 index 0000000000000..984b1432ccc9b --- /dev/null +++ b/docs/reference/esql/functions/layout/rtrim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-rtrim]] +=== `RTRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/rtrim.svg[Embedded,opts=inline] + +include::../parameters/rtrim.asciidoc[] +include::../description/rtrim.asciidoc[] +include::../types/rtrim.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sin.asciidoc b/docs/reference/esql/functions/layout/sin.asciidoc new file mode 100644 index 0000000000000..802045d0a23af --- /dev/null +++ b/docs/reference/esql/functions/layout/sin.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-sin]] +=== `SIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sin.svg[Embedded,opts=inline] + +include::../parameters/sin.asciidoc[] +include::../description/sin.asciidoc[] +include::../types/sin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sinh.asciidoc b/docs/reference/esql/functions/layout/sinh.asciidoc new file mode 100644 index 0000000000000..3ac1f03a608f6 --- /dev/null +++ b/docs/reference/esql/functions/layout/sinh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-sinh]] +=== `SINH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sinh.svg[Embedded,opts=inline] + +include::../parameters/sinh.asciidoc[] +include::../description/sinh.asciidoc[] +include::../types/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/split.asciidoc b/docs/reference/esql/functions/layout/split.asciidoc new file mode 100644 index 0000000000000..fc1e8540c4eea --- /dev/null +++ b/docs/reference/esql/functions/layout/split.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-split]] +=== `SPLIT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/split.svg[Embedded,opts=inline] + +include::../parameters/split.asciidoc[] +include::../description/split.asciidoc[] +include::../types/split.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sqrt.asciidoc b/docs/reference/esql/functions/layout/sqrt.asciidoc new file mode 100644 index 0000000000000..2690bfd3f8cfc --- /dev/null +++ b/docs/reference/esql/functions/layout/sqrt.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-sqrt]] +=== `SQRT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sqrt.svg[Embedded,opts=inline] + +include::../parameters/sqrt.asciidoc[] +include::../description/sqrt.asciidoc[] +include::../types/sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_intersects.asciidoc b/docs/reference/esql/functions/layout/st_intersects.asciidoc new file mode 100644 index 0000000000000..1d0721b65606e --- /dev/null +++ b/docs/reference/esql/functions/layout/st_intersects.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_intersects]] +=== `ST_INTERSECTS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] + +include::../parameters/st_intersects.asciidoc[] +include::../description/st_intersects.asciidoc[] +include::../types/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc new file mode 100644 index 0000000000000..ce3824aa157b1 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_x]] +=== `ST_X` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_x.svg[Embedded,opts=inline] + +include::../parameters/st_x.asciidoc[] +include::../description/st_x.asciidoc[] +include::../types/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc new file mode 100644 index 0000000000000..702e9097ae689 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_y]] +=== `ST_Y` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_y.svg[Embedded,opts=inline] + +include::../parameters/st_y.asciidoc[] +include::../description/st_y.asciidoc[] +include::../types/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/layout/starts_with.asciidoc b/docs/reference/esql/functions/layout/starts_with.asciidoc new file mode 100644 index 0000000000000..363b5e3fe33ee --- /dev/null +++ b/docs/reference/esql/functions/layout/starts_with.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-starts_with]] +=== `STARTS_WITH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/starts_with.svg[Embedded,opts=inline] + +include::../parameters/starts_with.asciidoc[] +include::../description/starts_with.asciidoc[] +include::../types/starts_with.asciidoc[] diff --git a/docs/reference/esql/functions/layout/substring.asciidoc b/docs/reference/esql/functions/layout/substring.asciidoc new file mode 100644 index 0000000000000..6bc48c89ed003 --- /dev/null +++ b/docs/reference/esql/functions/layout/substring.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-substring]] +=== `SUBSTRING` + +*Syntax* + +[.text-center] +image::esql/functions/signature/substring.svg[Embedded,opts=inline] + +include::../parameters/substring.asciidoc[] +include::../description/substring.asciidoc[] +include::../types/substring.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tan.asciidoc b/docs/reference/esql/functions/layout/tan.asciidoc new file mode 100644 index 0000000000000..056145f5eed44 --- /dev/null +++ b/docs/reference/esql/functions/layout/tan.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-tan]] +=== `TAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tan.svg[Embedded,opts=inline] + +include::../parameters/tan.asciidoc[] +include::../description/tan.asciidoc[] +include::../types/tan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tanh.asciidoc b/docs/reference/esql/functions/layout/tanh.asciidoc new file mode 100644 index 0000000000000..3024ac5fb2aff --- /dev/null +++ b/docs/reference/esql/functions/layout/tanh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-tanh]] +=== `TANH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tanh.svg[Embedded,opts=inline] + +include::../parameters/tanh.asciidoc[] +include::../description/tanh.asciidoc[] +include::../types/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tau.asciidoc b/docs/reference/esql/functions/layout/tau.asciidoc new file mode 100644 index 0000000000000..3b402f3e7a59f --- /dev/null +++ b/docs/reference/esql/functions/layout/tau.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-tau]] +=== `TAU` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tau.svg[Embedded,opts=inline] + +include::../parameters/tau.asciidoc[] +include::../description/tau.asciidoc[] +include::../types/tau.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_boolean.asciidoc b/docs/reference/esql/functions/layout/to_boolean.asciidoc new file mode 100644 index 0000000000000..670b9868be3d7 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_boolean.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-to_boolean]] +=== `TO_BOOLEAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_boolean.svg[Embedded,opts=inline] + +include::../parameters/to_boolean.asciidoc[] +include::../description/to_boolean.asciidoc[] +include::../types/to_boolean.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..6489414c6612f --- /dev/null +++ b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_cartesianpoint]] +=== `TO_CARTESIANPOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_cartesianpoint.svg[Embedded,opts=inline] + +include::../parameters/to_cartesianpoint.asciidoc[] +include::../description/to_cartesianpoint.asciidoc[] +include::../types/to_cartesianpoint.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..dbe5285a1d74a --- /dev/null +++ b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_cartesianshape]] +=== `TO_CARTESIANSHAPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_cartesianshape.svg[Embedded,opts=inline] + +include::../parameters/to_cartesianshape.asciidoc[] +include::../description/to_cartesianshape.asciidoc[] +include::../types/to_cartesianshape.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_datetime.asciidoc b/docs/reference/esql/functions/layout/to_datetime.asciidoc new file mode 100644 index 0000000000000..0ac5f7d5db4f9 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_datetime.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_datetime]] +=== `TO_DATETIME` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_datetime.svg[Embedded,opts=inline] + +include::../parameters/to_datetime.asciidoc[] +include::../description/to_datetime.asciidoc[] +include::../types/to_datetime.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_degrees.asciidoc b/docs/reference/esql/functions/layout/to_degrees.asciidoc new file mode 100644 index 0000000000000..2ca3524121f3f --- /dev/null +++ b/docs/reference/esql/functions/layout/to_degrees.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_degrees]] +=== `TO_DEGREES` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_degrees.svg[Embedded,opts=inline] + +include::../parameters/to_degrees.asciidoc[] +include::../description/to_degrees.asciidoc[] +include::../types/to_degrees.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_double.asciidoc b/docs/reference/esql/functions/layout/to_double.asciidoc new file mode 100644 index 0000000000000..e9ccbe66762c4 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_double.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-to_double]] +=== `TO_DOUBLE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_double.svg[Embedded,opts=inline] + +include::../parameters/to_double.asciidoc[] +include::../description/to_double.asciidoc[] +include::../types/to_double.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_geopoint.asciidoc b/docs/reference/esql/functions/layout/to_geopoint.asciidoc new file mode 100644 index 0000000000000..a1ede90e65043 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_geopoint.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_geopoint]] +=== `TO_GEOPOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_geopoint.svg[Embedded,opts=inline] + +include::../parameters/to_geopoint.asciidoc[] +include::../description/to_geopoint.asciidoc[] +include::../types/to_geopoint.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_geoshape.asciidoc b/docs/reference/esql/functions/layout/to_geoshape.asciidoc new file mode 100644 index 0000000000000..942dd1b7f052a --- /dev/null +++ b/docs/reference/esql/functions/layout/to_geoshape.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_geoshape]] +=== `TO_GEOSHAPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_geoshape.svg[Embedded,opts=inline] + +include::../parameters/to_geoshape.asciidoc[] +include::../description/to_geoshape.asciidoc[] +include::../types/to_geoshape.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_integer.asciidoc b/docs/reference/esql/functions/layout/to_integer.asciidoc new file mode 100644 index 0000000000000..c9a57b819ef44 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_integer.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_integer]] +=== `TO_INTEGER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_integer.svg[Embedded,opts=inline] + +include::../parameters/to_integer.asciidoc[] +include::../description/to_integer.asciidoc[] +include::../types/to_integer.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_ip.asciidoc b/docs/reference/esql/functions/layout/to_ip.asciidoc new file mode 100644 index 0000000000000..97e54b879693d --- /dev/null +++ b/docs/reference/esql/functions/layout/to_ip.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_ip]] +=== `TO_IP` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_ip.svg[Embedded,opts=inline] + +include::../parameters/to_ip.asciidoc[] +include::../description/to_ip.asciidoc[] +include::../types/to_ip.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_long.asciidoc b/docs/reference/esql/functions/layout/to_long.asciidoc new file mode 100644 index 0000000000000..00aafb0c8710b --- /dev/null +++ b/docs/reference/esql/functions/layout/to_long.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-to_long]] +=== `TO_LONG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_long.svg[Embedded,opts=inline] + +include::../parameters/to_long.asciidoc[] +include::../description/to_long.asciidoc[] +include::../types/to_long.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_lower.asciidoc b/docs/reference/esql/functions/layout/to_lower.asciidoc new file mode 100644 index 0000000000000..e1fbfb0079547 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_lower.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_lower]] +=== `TO_LOWER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_lower.svg[Embedded,opts=inline] + +include::../parameters/to_lower.asciidoc[] +include::../description/to_lower.asciidoc[] +include::../types/to_lower.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_radians.asciidoc b/docs/reference/esql/functions/layout/to_radians.asciidoc new file mode 100644 index 0000000000000..e8f19a0bb1f04 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_radians.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_radians]] +=== `TO_RADIANS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_radians.svg[Embedded,opts=inline] + +include::../parameters/to_radians.asciidoc[] +include::../description/to_radians.asciidoc[] +include::../types/to_radians.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_string.asciidoc b/docs/reference/esql/functions/layout/to_string.asciidoc new file mode 100644 index 0000000000000..02bf6d75b2dbb --- /dev/null +++ b/docs/reference/esql/functions/layout/to_string.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_string]] +=== `TO_STRING` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_string.svg[Embedded,opts=inline] + +include::../parameters/to_string.asciidoc[] +include::../description/to_string.asciidoc[] +include::../types/to_string.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..a1d7a2934bf8b --- /dev/null +++ b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_unsigned_long]] +=== `TO_UNSIGNED_LONG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_unsigned_long.svg[Embedded,opts=inline] + +include::../parameters/to_unsigned_long.asciidoc[] +include::../description/to_unsigned_long.asciidoc[] +include::../types/to_unsigned_long.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_upper.asciidoc b/docs/reference/esql/functions/layout/to_upper.asciidoc new file mode 100644 index 0000000000000..a589ab7fe54ed --- /dev/null +++ b/docs/reference/esql/functions/layout/to_upper.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-to_upper]] +=== `TO_UPPER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_upper.svg[Embedded,opts=inline] + +include::../parameters/to_upper.asciidoc[] +include::../description/to_upper.asciidoc[] +include::../types/to_upper.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_version.asciidoc b/docs/reference/esql/functions/layout/to_version.asciidoc new file mode 100644 index 0000000000000..71dbdbf45980a --- /dev/null +++ b/docs/reference/esql/functions/layout/to_version.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_version]] +=== `TO_VERSION` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_version.svg[Embedded,opts=inline] + +include::../parameters/to_version.asciidoc[] +include::../description/to_version.asciidoc[] +include::../types/to_version.asciidoc[] diff --git a/docs/reference/esql/functions/layout/trim.asciidoc b/docs/reference/esql/functions/layout/trim.asciidoc new file mode 100644 index 0000000000000..3cae051176a4e --- /dev/null +++ b/docs/reference/esql/functions/layout/trim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-trim]] +=== `TRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/trim.svg[Embedded,opts=inline] + +include::../parameters/trim.asciidoc[] +include::../description/trim.asciidoc[] +include::../types/trim.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc index 41f58b0d415c2..2860eb31090c4 100644 --- a/docs/reference/esql/functions/least.asciidoc +++ b/docs/reference/esql/functions/least.asciidoc @@ -24,8 +24,6 @@ NOTE: When run on `keyword` or `text` fields, this returns the first string in alphabetical order. When run on `boolean` columns this will return `false` if any values are `false`. -*Supported types* - include::types/least.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/left.asciidoc b/docs/reference/esql/functions/left.asciidoc deleted file mode 100644 index 5d666656b1ee4..0000000000000 --- a/docs/reference/esql/functions/left.asciidoc +++ /dev/null @@ -1,36 +0,0 @@ -[discrete] -[[esql-left]] -=== `LEFT` - -*Syntax* - -[.text-center] -image::esql/functions/signature/left.svg[Embedded,opts=inline] - -*Parameters* - -`str`:: -The string from which to return a substring. - -`length`:: -The number of characters to return. - -*Description* - -Returns the substring that extracts 'length' chars from 'str' starting -from the left. - -*Supported types* - -include::types/left.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/string.csv-spec[tag=left] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/string.csv-spec[tag=left-result] -|=== diff --git a/docs/reference/esql/functions/log.asciidoc b/docs/reference/esql/functions/log.asciidoc index 79ea72898bc2f..b1470e50f2881 100644 --- a/docs/reference/esql/functions/log.asciidoc +++ b/docs/reference/esql/functions/log.asciidoc @@ -23,8 +23,6 @@ Returns the logarithm of a value to a base. The input can be any numeric value, Logs of zero, negative numbers, infinites and base of one return `null` as well as a warning. 
-*Supported types* - include::types/log.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/log10.asciidoc b/docs/reference/esql/functions/log10.asciidoc index d806da3173818..f0c16f3e4769e 100644 --- a/docs/reference/esql/functions/log10.asciidoc +++ b/docs/reference/esql/functions/log10.asciidoc @@ -17,8 +17,6 @@ value is always a double. Logs of 0, negative numbers, and infinites return `null` as well as a warning. -*Supported types* - include::types/log10.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/ltrim.asciidoc b/docs/reference/esql/functions/ltrim.asciidoc index 4b7b619d06afc..8fb6b6b1b6e25 100644 --- a/docs/reference/esql/functions/ltrim.asciidoc +++ b/docs/reference/esql/functions/ltrim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Removes leading whitespaces from strings. -*Supported types* - include::types/rtrim.asciidoc[] *Example* @@ -29,4 +27,4 @@ include::{esql-specs}/string.csv-spec[tag=ltrim] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/string.csv-spec[tag=ltrim-result] -|=== \ No newline at end of file +|=== diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 0ddf7412db2a1..5faf994d61db6 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -31,12 +31,12 @@ * <> // end::math_list[] -include::abs.asciidoc[] -include::acos.asciidoc[] -include::asin.asciidoc[] -include::atan.asciidoc[] -include::atan2.asciidoc[] -include::ceil.asciidoc[] +include::layout/abs.asciidoc[] +include::layout/acos.asciidoc[] +include::layout/asin.asciidoc[] +include::layout/atan.asciidoc[] +include::layout/atan2.asciidoc[] +include::layout/ceil.asciidoc[] include::cos.asciidoc[] include::cosh.asciidoc[] include::e.asciidoc[] diff --git a/docs/reference/esql/functions/mv_avg.asciidoc b/docs/reference/esql/functions/mv_avg.asciidoc index 27fa2542a8b8f..c81574beed376 100644 --- a/docs/reference/esql/functions/mv_avg.asciidoc +++ b/docs/reference/esql/functions/mv_avg.asciidoc @@ -19,8 +19,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the average of all of the values. -*Supported types* - include::types/mv_avg.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_concat.asciidoc b/docs/reference/esql/functions/mv_concat.asciidoc index e42cc84d62b15..b5ad13cbe3619 100644 --- a/docs/reference/esql/functions/mv_concat.asciidoc +++ b/docs/reference/esql/functions/mv_concat.asciidoc @@ -20,8 +20,6 @@ Delimiter. Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter. -*Supported types* - include::types/mv_concat.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_count.asciidoc b/docs/reference/esql/functions/mv_count.asciidoc index 0545335556030..ac870cf77605d 100644 --- a/docs/reference/esql/functions/mv_count.asciidoc +++ b/docs/reference/esql/functions/mv_count.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing a count of the number of values. 
-*Supported types* - include::types/mv_count.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_dedupe.asciidoc b/docs/reference/esql/functions/mv_dedupe.asciidoc index 09b3827c45e45..84def0127f0ac 100644 --- a/docs/reference/esql/functions/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/mv_dedupe.asciidoc @@ -18,8 +18,6 @@ Removes duplicates from a multivalue expression. NOTE: `MV_DEDUPE` may, but won't always, sort the values in the column. -*Supported types* - include::types/mv_dedupe.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_first.asciidoc b/docs/reference/esql/functions/mv_first.asciidoc index 13d21b15f958e..115e8e69f2a3c 100644 --- a/docs/reference/esql/functions/mv_first.asciidoc +++ b/docs/reference/esql/functions/mv_first.asciidoc @@ -24,8 +24,6 @@ rely on that. If you need the minimum value use <> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a performance benefit to `MV_FIRST`. -*Supported types* - include::types/mv_first.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_last.asciidoc b/docs/reference/esql/functions/mv_last.asciidoc index ee6a4a8fed8ba..7843009b74249 100644 --- a/docs/reference/esql/functions/mv_last.asciidoc +++ b/docs/reference/esql/functions/mv_last.asciidoc @@ -24,8 +24,6 @@ rely on that. If you need the maximum value use <> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a performance benefit to `MV_LAST`. -*Supported types* - include::types/mv_last.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_max.asciidoc b/docs/reference/esql/functions/mv_max.asciidoc index e13e61e0d123d..c915ce5d2e603 100644 --- a/docs/reference/esql/functions/mv_max.asciidoc +++ b/docs/reference/esql/functions/mv_max.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the maximum value. -*Supported types* - include::types/mv_max.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_median.asciidoc b/docs/reference/esql/functions/mv_median.asciidoc index 05c54342c0f74..44f955e20e1cb 100644 --- a/docs/reference/esql/functions/mv_median.asciidoc +++ b/docs/reference/esql/functions/mv_median.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued column into a single valued column containing the median value. -*Supported types* - include::types/mv_median.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_min.asciidoc b/docs/reference/esql/functions/mv_min.asciidoc index b851f480fd619..1965d3de52781 100644 --- a/docs/reference/esql/functions/mv_min.asciidoc +++ b/docs/reference/esql/functions/mv_min.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the minimum value. -*Supported types* - include::types/mv_min.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_slice.asciidoc b/docs/reference/esql/functions/mv_slice.asciidoc index f4431b25232a2..65436392fcf4e 100644 --- a/docs/reference/esql/functions/mv_slice.asciidoc +++ b/docs/reference/esql/functions/mv_slice.asciidoc @@ -22,8 +22,6 @@ End position. Optional; if omitted, the position at `start` is returned. The end Returns a subset of the multivalued field using the start and end index values. 
-*Supported types* - include::types/mv_slice.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_sort.asciidoc b/docs/reference/esql/functions/mv_sort.asciidoc index abe09989fbac5..2df9a8c01ca62 100644 --- a/docs/reference/esql/functions/mv_sort.asciidoc +++ b/docs/reference/esql/functions/mv_sort.asciidoc @@ -19,8 +19,6 @@ Sort order. The valid options are ASC and DESC, the default is ASC. Sorts a multivalue expression in lexicographical order. -*Supported types* - include::types/mv_sort.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_sum.asciidoc b/docs/reference/esql/functions/mv_sum.asciidoc index bc252bc9d3fa0..56f9565097a00 100644 --- a/docs/reference/esql/functions/mv_sum.asciidoc +++ b/docs/reference/esql/functions/mv_sum.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued column into a single valued column containing the sum of all of the values. -*Supported types* - include::types/mv_sum.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_zip.asciidoc b/docs/reference/esql/functions/mv_zip.asciidoc index 4e71e2cafb9c4..0fc30fb91a737 100644 --- a/docs/reference/esql/functions/mv_zip.asciidoc +++ b/docs/reference/esql/functions/mv_zip.asciidoc @@ -22,8 +22,6 @@ Delimiter. Optional; if omitted, `,` is used as a default delimiter. Combines the values from two multivalued fields with a delimiter that joins them together. -*Supported types* - include::types/mv_zip.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/parameters/abs.asciidoc b/docs/reference/esql/functions/parameters/abs.asciidoc new file mode 100644 index 0000000000000..8527c7f74bb09 --- /dev/null +++ b/docs/reference/esql/functions/parameters/abs.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/acos.asciidoc b/docs/reference/esql/functions/parameters/acos.asciidoc new file mode 100644 index 0000000000000..2d06f7e70333d --- /dev/null +++ b/docs/reference/esql/functions/parameters/acos.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +Number between -1 and 1. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc new file mode 100644 index 0000000000000..2d06f7e70333d --- /dev/null +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +Number between -1 and 1. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc new file mode 100644 index 0000000000000..8527c7f74bb09 --- /dev/null +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc new file mode 100644 index 0000000000000..8dc744ad03e6a --- /dev/null +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`y_coordinate`:: +y coordinate. If `null`, the function returns `null`. + +`x_coordinate`:: +x coordinate. If `null`, the function returns `null`. 
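For context, a minimal ES|QL sketch of how the `ATAN2` parameters documented in the new `parameters/atan2.asciidoc` file above fit together; the row values are illustrative only and are not part of this change:

[source,esql]
----
ROW y=12.9, x=0.6
| EVAL angle = ATAN2(y, x)   // angle of the point (x, y) from the positive x-axis, in radians
----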
diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc new file mode 100644 index 0000000000000..0f9c6a1b81c99 --- /dev/null +++ b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc @@ -0,0 +1,13 @@ +*Parameters* + +`field`:: + + +`buckets`:: + + +`from`:: + + +`to`:: + diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc new file mode 100644 index 0000000000000..fb70278c17d1a --- /dev/null +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`condition`:: + + +`trueValue`:: + diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc b/docs/reference/esql/functions/parameters/ceil.asciidoc new file mode 100644 index 0000000000000..8527c7f74bb09 --- /dev/null +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc new file mode 100644 index 0000000000000..07c8a84ed5583 --- /dev/null +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: +Expression to evaluate + +`rest`:: +Other expression to evaluate diff --git a/docs/reference/esql/functions/parameters/concat.asciidoc b/docs/reference/esql/functions/parameters/concat.asciidoc new file mode 100644 index 0000000000000..47a555fbe80c6 --- /dev/null +++ b/docs/reference/esql/functions/parameters/concat.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`string1`:: + + +`string2`:: + diff --git a/docs/reference/esql/functions/parameters/cos.asciidoc b/docs/reference/esql/functions/parameters/cos.asciidoc new file mode 100644 index 0000000000000..eceab83443236 --- /dev/null +++ b/docs/reference/esql/functions/parameters/cos.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/cosh.asciidoc b/docs/reference/esql/functions/parameters/cosh.asciidoc new file mode 100644 index 0000000000000..1535b0feb8424 --- /dev/null +++ b/docs/reference/esql/functions/parameters/cosh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +The number who's hyperbolic cosine is to be returned diff --git a/docs/reference/esql/functions/parameters/date_diff.asciidoc b/docs/reference/esql/functions/parameters/date_diff.asciidoc new file mode 100644 index 0000000000000..9a9ef6fb34fba --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_diff.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`unit`:: +A valid date unit + +`startTimestamp`:: +A string representing a start timestamp + +`endTimestamp`:: +A string representing an end timestamp diff --git a/docs/reference/esql/functions/parameters/date_extract.asciidoc b/docs/reference/esql/functions/parameters/date_extract.asciidoc new file mode 100644 index 0000000000000..170bc40d89ef6 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_extract.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`datePart`:: +Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era. + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc new file mode 100644 index 0000000000000..7b000418b961c --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_format.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`dateFormat`:: +A valid date pattern + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/parameters/date_parse.asciidoc b/docs/reference/esql/functions/parameters/date_parse.asciidoc new file mode 100644 index 0000000000000..30a09e43c5361 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_parse.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`datePattern`:: +A valid date pattern + +`dateString`:: +A string representing a date diff --git a/docs/reference/esql/functions/parameters/e.asciidoc b/docs/reference/esql/functions/parameters/e.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/e.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/ends_with.asciidoc b/docs/reference/esql/functions/parameters/ends_with.asciidoc new file mode 100644 index 0000000000000..314eec2bf39ea --- /dev/null +++ b/docs/reference/esql/functions/parameters/ends_with.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`suffix`:: + diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc new file mode 100644 index 0000000000000..55c75eae0de74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: + + +`rest`:: + diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc new file mode 100644 index 0000000000000..55c75eae0de74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: + + +`rest`:: + diff --git a/docs/reference/esql/functions/parameters/left.asciidoc b/docs/reference/esql/functions/parameters/left.asciidoc new file mode 100644 index 0000000000000..b296adfc064be --- /dev/null +++ b/docs/reference/esql/functions/parameters/left.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`string`:: +The string from which to return a substring. + +`length`:: +The number of characters to return. 
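Taken together, the date parsing, extraction, and formatting parameters documented above can be exercised in one small sketch (the pattern and date string are made up for illustration):

[source,esql]
----
ROW date_string = "2024-03-21"
| EVAL dt = DATE_PARSE("yyyy-MM-dd", date_string)
| EVAL year  = DATE_EXTRACT("year", dt),          // extract the "year" date part
       label = DATE_FORMAT("yyyy-MM-dd", dt)      // format back to a string
----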
diff --git a/docs/reference/esql/functions/parameters/length.asciidoc b/docs/reference/esql/functions/parameters/length.asciidoc new file mode 100644 index 0000000000000..4c3a25283c403 --- /dev/null +++ b/docs/reference/esql/functions/parameters/length.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`string`:: + diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc new file mode 100644 index 0000000000000..1d2306c5b215b --- /dev/null +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`base`:: + + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/ltrim.asciidoc b/docs/reference/esql/functions/parameters/ltrim.asciidoc new file mode 100644 index 0000000000000..4c3a25283c403 --- /dev/null +++ b/docs/reference/esql/functions/parameters/ltrim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`string`:: + diff --git a/docs/reference/esql/functions/parameters/mv_avg.asciidoc b/docs/reference/esql/functions/parameters/mv_avg.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_avg.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/mv_concat.asciidoc b/docs/reference/esql/functions/parameters/mv_concat.asciidoc new file mode 100644 index 0000000000000..88893478e2b74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_concat.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`string`:: +values to join + +`delim`:: +delimiter diff --git a/docs/reference/esql/functions/parameters/mv_count.asciidoc b/docs/reference/esql/functions/parameters/mv_count.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_count.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_first.asciidoc b/docs/reference/esql/functions/parameters/mv_first.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_first.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_last.asciidoc b/docs/reference/esql/functions/parameters/mv_last.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_last.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_max.asciidoc b/docs/reference/esql/functions/parameters/mv_max.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_max.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_median.asciidoc 
b/docs/reference/esql/functions/parameters/mv_median.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_median.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/mv_min.asciidoc b/docs/reference/esql/functions/parameters/mv_min.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_min.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_slice.asciidoc b/docs/reference/esql/functions/parameters/mv_slice.asciidoc new file mode 100644 index 0000000000000..cffbfaff95e86 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_slice.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`field`:: +A multivalued field + +`start`:: +start index + +`end`:: +end index (included) diff --git a/docs/reference/esql/functions/parameters/mv_sort.asciidoc b/docs/reference/esql/functions/parameters/mv_sort.asciidoc new file mode 100644 index 0000000000000..aee8353cfd416 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_sort.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`field`:: +A multivalued field + +`order`:: +sort order diff --git a/docs/reference/esql/functions/parameters/mv_sum.asciidoc b/docs/reference/esql/functions/parameters/mv_sum.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_sum.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/mv_zip.asciidoc b/docs/reference/esql/functions/parameters/mv_zip.asciidoc new file mode 100644 index 0000000000000..09ab5969fe66a --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_zip.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`string1`:: +A multivalued field + +`string2`:: +A multivalued field + +`delim`:: +delimiter diff --git a/docs/reference/esql/functions/parameters/pi.asciidoc b/docs/reference/esql/functions/parameters/pi.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/pi.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/pow.asciidoc b/docs/reference/esql/functions/parameters/pow.asciidoc new file mode 100644 index 0000000000000..77b3dc186dac7 --- /dev/null +++ b/docs/reference/esql/functions/parameters/pow.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`base`:: + + +`exponent`:: + diff --git a/docs/reference/esql/functions/parameters/replace.asciidoc b/docs/reference/esql/functions/parameters/replace.asciidoc new file mode 100644 index 0000000000000..f8831e5a6b8c5 --- /dev/null +++ b/docs/reference/esql/functions/parameters/replace.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`string`:: + + +`regex`:: + + +`newString`:: + diff --git a/docs/reference/esql/functions/parameters/right.asciidoc b/docs/reference/esql/functions/parameters/right.asciidoc new file mode 100644 index 0000000000000..3ddd7e7c8cd68 --- /dev/null +++ b/docs/reference/esql/functions/parameters/right.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`string`:: + + +`length`:: + diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc new file mode 100644 index 0000000000000..ef53d9e07eb00 --- /dev/null +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + 
+`number`:: +The numeric value to round + +`decimals`:: +The number of decimal places to round to. Defaults to 0. diff --git a/docs/reference/esql/functions/parameters/rtrim.asciidoc b/docs/reference/esql/functions/parameters/rtrim.asciidoc new file mode 100644 index 0000000000000..4c3a25283c403 --- /dev/null +++ b/docs/reference/esql/functions/parameters/rtrim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`string`:: + diff --git a/docs/reference/esql/functions/parameters/sin.asciidoc b/docs/reference/esql/functions/parameters/sin.asciidoc new file mode 100644 index 0000000000000..eceab83443236 --- /dev/null +++ b/docs/reference/esql/functions/parameters/sin.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/sinh.asciidoc b/docs/reference/esql/functions/parameters/sinh.asciidoc new file mode 100644 index 0000000000000..d2dfa9701ff89 --- /dev/null +++ b/docs/reference/esql/functions/parameters/sinh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +The number to return the hyperbolic sine of diff --git a/docs/reference/esql/functions/parameters/split.asciidoc b/docs/reference/esql/functions/parameters/split.asciidoc new file mode 100644 index 0000000000000..7b3c24adae928 --- /dev/null +++ b/docs/reference/esql/functions/parameters/split.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`string`:: + + +`delim`:: + diff --git a/docs/reference/esql/functions/parameters/sqrt.asciidoc b/docs/reference/esql/functions/parameters/sqrt.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/sqrt.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc new file mode 100644 index 0000000000000..d3d26fc981caf --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`point`:: + diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc new file mode 100644 index 0000000000000..d3d26fc981caf --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`point`:: + diff --git a/docs/reference/esql/functions/parameters/starts_with.asciidoc b/docs/reference/esql/functions/parameters/starts_with.asciidoc new file mode 100644 index 0000000000000..75558cad04106 --- /dev/null +++ b/docs/reference/esql/functions/parameters/starts_with.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`prefix`:: + diff --git a/docs/reference/esql/functions/parameters/substring.asciidoc b/docs/reference/esql/functions/parameters/substring.asciidoc new file mode 100644 index 0000000000000..19c4e5551185a --- /dev/null +++ b/docs/reference/esql/functions/parameters/substring.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`string`:: + + +`start`:: + + +`length`:: + diff --git a/docs/reference/esql/functions/parameters/tan.asciidoc 
b/docs/reference/esql/functions/parameters/tan.asciidoc new file mode 100644 index 0000000000000..eceab83443236 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tan.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/tanh.asciidoc b/docs/reference/esql/functions/parameters/tanh.asciidoc new file mode 100644 index 0000000000000..1fc97c3b68f84 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tanh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: +The number to return the hyperbolic tangent of diff --git a/docs/reference/esql/functions/parameters/tau.asciidoc b/docs/reference/esql/functions/parameters/tau.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tau.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/to_boolean.asciidoc b/docs/reference/esql/functions/parameters/to_boolean.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_boolean.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_datetime.asciidoc b/docs/reference/esql/functions/parameters/to_datetime.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_datetime.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_degrees.asciidoc b/docs/reference/esql/functions/parameters/to_degrees.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_degrees.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/to_double.asciidoc b/docs/reference/esql/functions/parameters/to_double.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_double.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git 
a/docs/reference/esql/functions/parameters/to_integer.asciidoc b/docs/reference/esql/functions/parameters/to_integer.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_integer.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_ip.asciidoc b/docs/reference/esql/functions/parameters/to_ip.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_ip.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_long.asciidoc b/docs/reference/esql/functions/parameters/to_long.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_long.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_lower.asciidoc b/docs/reference/esql/functions/parameters/to_lower.asciidoc new file mode 100644 index 0000000000000..4f2e56949be24 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_lower.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: +The input string diff --git a/docs/reference/esql/functions/parameters/to_radians.asciidoc b/docs/reference/esql/functions/parameters/to_radians.asciidoc new file mode 100644 index 0000000000000..9faa6c1adebe2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_radians.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/parameters/to_string.asciidoc b/docs/reference/esql/functions/parameters/to_string.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_string.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/to_upper.asciidoc b/docs/reference/esql/functions/parameters/to_upper.asciidoc new file mode 100644 index 0000000000000..4f2e56949be24 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_upper.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: +The input string diff --git a/docs/reference/esql/functions/parameters/to_version.asciidoc b/docs/reference/esql/functions/parameters/to_version.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_version.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/trim.asciidoc b/docs/reference/esql/functions/parameters/trim.asciidoc new file mode 100644 index 0000000000000..4c3a25283c403 --- /dev/null +++ b/docs/reference/esql/functions/parameters/trim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`string`:: + diff --git a/docs/reference/esql/functions/pow.asciidoc b/docs/reference/esql/functions/pow.asciidoc index 8c31bd21e8a46..6618b728d7da9 100644 --- a/docs/reference/esql/functions/pow.asciidoc +++ b/docs/reference/esql/functions/pow.asciidoc @@ -21,8 +21,6 @@ Returns the value of `base` raised to the power of `exponent`. Both arguments must be numeric. The output is always a double. 
Note that it is still possible to overflow a double result here; in that case, null will be returned. -*Supported types* - include::types/pow.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/replace.asciidoc b/docs/reference/esql/functions/replace.asciidoc index 05856829eb193..f56567c5150c8 100644 --- a/docs/reference/esql/functions/replace.asciidoc +++ b/docs/reference/esql/functions/replace.asciidoc @@ -25,8 +25,6 @@ The function substitutes in the string `str` any match of the regular expression If any of the arguments is `null`, the result is `null`. -*Supported types* - include::types/replace.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/right.asciidoc b/docs/reference/esql/functions/right.asciidoc index 1b291e53729ee..e9e4c2ebf3806 100644 --- a/docs/reference/esql/functions/right.asciidoc +++ b/docs/reference/esql/functions/right.asciidoc @@ -20,8 +20,6 @@ The number of characters to return. Return the substring that extracts 'length' chars from 'str' starting from the right. -*Supported types* - include::types/right.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/round.asciidoc b/docs/reference/esql/functions/round.asciidoc index 7f1285e85f664..e792db6c1ed69 100644 --- a/docs/reference/esql/functions/round.asciidoc +++ b/docs/reference/esql/functions/round.asciidoc @@ -20,8 +20,6 @@ Rounds a number to the closest number with the specified number of digits. Defaults to 0 digits if no number of digits is provided. If the specified number of digits is negative, rounds to the number of digits left of the decimal point. -*Supported types* - include::types/round.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/rtrim.asciidoc b/docs/reference/esql/functions/rtrim.asciidoc index 588b7b9fc5433..aead0cf88b898 100644 --- a/docs/reference/esql/functions/rtrim.asciidoc +++ b/docs/reference/esql/functions/rtrim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Removes trailing whitespaces from strings. 
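A short illustrative query tying the numeric and string functions above together (the input values are chosen only for the example):

[source,esql]
----
ROW n = 123.456, s = "Hello World"
| EVAL p    = POW(2.0, 10),            // 1024.0, always a double
       r    = ROUND(n, 1),             // 123.5
       neg  = ROUND(n, -2),            // 100.0, negative decimals round left of the point
       tail = RIGHT(s, 5),             // "World"
       repl = REPLACE(s, "World", "ESQL")
----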
-*Supported types* - include::types/rtrim.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/signature/abs.svg b/docs/reference/esql/functions/signature/abs.svg index 0b7aac99357ea..0ca58b790aeab 100644 --- a/docs/reference/esql/functions/signature/abs.svg +++ b/docs/reference/esql/functions/signature/abs.svg @@ -1 +1 @@ -ABS(n) \ No newline at end of file +ABS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/acos.svg b/docs/reference/esql/functions/signature/acos.svg index 6a2e2c04cd20e..837a3aa00258b 100644 --- a/docs/reference/esql/functions/signature/acos.svg +++ b/docs/reference/esql/functions/signature/acos.svg @@ -1 +1 @@ -ACOS(n) \ No newline at end of file +ACOS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/asin.svg b/docs/reference/esql/functions/signature/asin.svg index 9792e7316b138..85bff67ad7a96 100644 --- a/docs/reference/esql/functions/signature/asin.svg +++ b/docs/reference/esql/functions/signature/asin.svg @@ -1 +1 @@ -ASIN(n) \ No newline at end of file +ASIN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan.svg b/docs/reference/esql/functions/signature/atan.svg index 184956ff2e126..e741718b6dbd8 100644 --- a/docs/reference/esql/functions/signature/atan.svg +++ b/docs/reference/esql/functions/signature/atan.svg @@ -1 +1 @@ -ATAN(n) \ No newline at end of file +ATAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan2.svg b/docs/reference/esql/functions/signature/atan2.svg index f2295d3d98f16..d54fcf2770c3c 100644 --- a/docs/reference/esql/functions/signature/atan2.svg +++ b/docs/reference/esql/functions/signature/atan2.svg @@ -1 +1 @@ -ATAN2(y,x) \ No newline at end of file +ATAN2(y_coordinate,x_coordinate) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ceil.svg b/docs/reference/esql/functions/signature/ceil.svg index bb07117e56630..51b1bbba6bcef 100644 --- a/docs/reference/esql/functions/signature/ceil.svg +++ b/docs/reference/esql/functions/signature/ceil.svg @@ -1 +1 @@ -CEIL(n) \ No newline at end of file +CEIL(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/coalesce.svg b/docs/reference/esql/functions/signature/coalesce.svg index 22a70efead49c..4c4119582b223 100644 --- a/docs/reference/esql/functions/signature/coalesce.svg +++ b/docs/reference/esql/functions/signature/coalesce.svg @@ -1 +1 @@ -COALESCE(expression,expressionX) \ No newline at end of file +COALESCE(first,rest) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/concat.svg b/docs/reference/esql/functions/signature/concat.svg index 3ad2ae37b11c3..fec245895aa08 100644 --- a/docs/reference/esql/functions/signature/concat.svg +++ b/docs/reference/esql/functions/signature/concat.svg @@ -1 +1 @@ -CONCAT(first,rest) \ No newline at end of file +CONCAT(string1,string2) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cos.svg b/docs/reference/esql/functions/signature/cos.svg index f06a24726f71a..ff0484a362aef 100644 --- a/docs/reference/esql/functions/signature/cos.svg +++ b/docs/reference/esql/functions/signature/cos.svg @@ -1 +1 @@ -COS(n) \ No newline at end of file +COS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cosh.svg b/docs/reference/esql/functions/signature/cosh.svg index 54ea9bff84097..9b9eddd3cb808 100644 --- 
a/docs/reference/esql/functions/signature/cosh.svg +++ b/docs/reference/esql/functions/signature/cosh.svg @@ -1 +1 @@ -COSH(n) \ No newline at end of file +COSH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_extract.svg b/docs/reference/esql/functions/signature/date_extract.svg index 397cdd400d88c..9aa7d4f4869c0 100644 --- a/docs/reference/esql/functions/signature/date_extract.svg +++ b/docs/reference/esql/functions/signature/date_extract.svg @@ -1 +1 @@ -DATE_EXTRACT(date_part,field) \ No newline at end of file +DATE_EXTRACT(datePart,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_format.svg b/docs/reference/esql/functions/signature/date_format.svg new file mode 100644 index 0000000000000..961fcff51d42b --- /dev/null +++ b/docs/reference/esql/functions/signature/date_format.svg @@ -0,0 +1 @@ +DATE_FORMAT(dateFormat,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/floor.svg b/docs/reference/esql/functions/signature/floor.svg index 7e153548bfd82..06a7de70fce3e 100644 --- a/docs/reference/esql/functions/signature/floor.svg +++ b/docs/reference/esql/functions/signature/floor.svg @@ -1 +1 @@ -FLOOR(n) \ No newline at end of file +FLOOR(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/left.svg b/docs/reference/esql/functions/signature/left.svg index 75704982af004..ec14bf8c72131 100644 --- a/docs/reference/esql/functions/signature/left.svg +++ b/docs/reference/esql/functions/signature/left.svg @@ -1 +1 @@ -LEFT(str,length) \ No newline at end of file +LEFT(string,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/length.svg b/docs/reference/esql/functions/signature/length.svg index d199f1a9a0170..a1410895f7fdf 100644 --- a/docs/reference/esql/functions/signature/length.svg +++ b/docs/reference/esql/functions/signature/length.svg @@ -1 +1 @@ -LENGTH(str) \ No newline at end of file +LENGTH(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/log.svg b/docs/reference/esql/functions/signature/log.svg index 39a9a7e8dc52e..6b013ee52f498 100644 --- a/docs/reference/esql/functions/signature/log.svg +++ b/docs/reference/esql/functions/signature/log.svg @@ -1 +1 @@ -LOG(base,value) \ No newline at end of file +LOG(base,number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/log10.svg b/docs/reference/esql/functions/signature/log10.svg index 50edcf6ea943f..20522ccff4bea 100644 --- a/docs/reference/esql/functions/signature/log10.svg +++ b/docs/reference/esql/functions/signature/log10.svg @@ -1 +1 @@ -LOG10(n) \ No newline at end of file +LOG10(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ltrim.svg b/docs/reference/esql/functions/signature/ltrim.svg index 327e75b92ca19..6fc4b0b0f8f47 100644 --- a/docs/reference/esql/functions/signature/ltrim.svg +++ b/docs/reference/esql/functions/signature/ltrim.svg @@ -1 +1 @@ -LTRIM(str) \ No newline at end of file +LTRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_avg.svg b/docs/reference/esql/functions/signature/mv_avg.svg index 4c2371eac0b44..c3d71a5f6d5d8 100644 --- a/docs/reference/esql/functions/signature/mv_avg.svg +++ b/docs/reference/esql/functions/signature/mv_avg.svg @@ -1 +1 @@ -MV_AVG(field) \ No newline at end of file +MV_AVG(number) \ No newline at end of file diff --git 
a/docs/reference/esql/functions/signature/mv_concat.svg b/docs/reference/esql/functions/signature/mv_concat.svg index ec3a3aa4ae750..34dd6f98a9dce 100644 --- a/docs/reference/esql/functions/signature/mv_concat.svg +++ b/docs/reference/esql/functions/signature/mv_concat.svg @@ -1 +1 @@ -MV_CONCAT(v,delim) \ No newline at end of file +MV_CONCAT(string,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_count.svg b/docs/reference/esql/functions/signature/mv_count.svg index 48e60f26e394d..faac0140e5910 100644 --- a/docs/reference/esql/functions/signature/mv_count.svg +++ b/docs/reference/esql/functions/signature/mv_count.svg @@ -1 +1 @@ -MV_COUNT(v) \ No newline at end of file +MV_COUNT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_dedupe.svg b/docs/reference/esql/functions/signature/mv_dedupe.svg index 92be3210ce895..b84aa78051e44 100644 --- a/docs/reference/esql/functions/signature/mv_dedupe.svg +++ b/docs/reference/esql/functions/signature/mv_dedupe.svg @@ -1 +1 @@ -MV_DEDUPE(v) \ No newline at end of file +MV_DEDUPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_first.svg b/docs/reference/esql/functions/signature/mv_first.svg index 20d201eab0add..0f302227091d8 100644 --- a/docs/reference/esql/functions/signature/mv_first.svg +++ b/docs/reference/esql/functions/signature/mv_first.svg @@ -1 +1 @@ -MV_FIRST(v) \ No newline at end of file +MV_FIRST(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_last.svg b/docs/reference/esql/functions/signature/mv_last.svg index eb32bb49f8ccc..3c60c008a9004 100644 --- a/docs/reference/esql/functions/signature/mv_last.svg +++ b/docs/reference/esql/functions/signature/mv_last.svg @@ -1 +1 @@ -MV_LAST(v) \ No newline at end of file +MV_LAST(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_max.svg b/docs/reference/esql/functions/signature/mv_max.svg index 6c64809be0720..18b0ef9847ae8 100644 --- a/docs/reference/esql/functions/signature/mv_max.svg +++ b/docs/reference/esql/functions/signature/mv_max.svg @@ -1 +1 @@ -MV_MAX(v) \ No newline at end of file +MV_MAX(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_median.svg b/docs/reference/esql/functions/signature/mv_median.svg index b287fde6dd97e..be0e34b5739e0 100644 --- a/docs/reference/esql/functions/signature/mv_median.svg +++ b/docs/reference/esql/functions/signature/mv_median.svg @@ -1 +1 @@ -MV_MEDIAN(v) \ No newline at end of file +MV_MEDIAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_min.svg b/docs/reference/esql/functions/signature/mv_min.svg index c6ef5e30c289c..c6f998283de4c 100644 --- a/docs/reference/esql/functions/signature/mv_min.svg +++ b/docs/reference/esql/functions/signature/mv_min.svg @@ -1 +1 @@ -MV_MIN(v) \ No newline at end of file +MV_MIN(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_slice.svg b/docs/reference/esql/functions/signature/mv_slice.svg index 277566a35e47d..c62cec6ceca4d 100644 --- a/docs/reference/esql/functions/signature/mv_slice.svg +++ b/docs/reference/esql/functions/signature/mv_slice.svg @@ -1 +1 @@ -MV_SLICE(v,start,end) \ No newline at end of file +MV_SLICE(field,start,end) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_sum.svg b/docs/reference/esql/functions/signature/mv_sum.svg index 
3e3fbd30355b1..ca296b9f96711 100644 --- a/docs/reference/esql/functions/signature/mv_sum.svg +++ b/docs/reference/esql/functions/signature/mv_sum.svg @@ -1 +1 @@ -MV_SUM(v) \ No newline at end of file +MV_SUM(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_zip.svg b/docs/reference/esql/functions/signature/mv_zip.svg index 02c61b3c4bc5c..727d09ae7f47e 100644 --- a/docs/reference/esql/functions/signature/mv_zip.svg +++ b/docs/reference/esql/functions/signature/mv_zip.svg @@ -1 +1 @@ -MV_ZIP(mvLeft,mvRight,delim) \ No newline at end of file +MV_ZIP(string1,string2,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/replace.svg b/docs/reference/esql/functions/signature/replace.svg index bbcd11bcc0ab6..49fccf329732d 100644 --- a/docs/reference/esql/functions/signature/replace.svg +++ b/docs/reference/esql/functions/signature/replace.svg @@ -1 +1 @@ -REPLACE(str,regex,newStr) \ No newline at end of file +REPLACE(string,regex,newString) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/right.svg b/docs/reference/esql/functions/signature/right.svg index 969a6c9442479..0afa5dbf01f16 100644 --- a/docs/reference/esql/functions/signature/right.svg +++ b/docs/reference/esql/functions/signature/right.svg @@ -1 +1 @@ -RIGHT(str,length) \ No newline at end of file +RIGHT(string,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/round.svg b/docs/reference/esql/functions/signature/round.svg index 9da0b9d11213e..288b8ab2383e4 100644 --- a/docs/reference/esql/functions/signature/round.svg +++ b/docs/reference/esql/functions/signature/round.svg @@ -1 +1 @@ -ROUND(value,decimals) \ No newline at end of file +ROUND(number,decimals) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/rtrim.svg b/docs/reference/esql/functions/signature/rtrim.svg index b830bb59c5c31..1f2bfe6252a64 100644 --- a/docs/reference/esql/functions/signature/rtrim.svg +++ b/docs/reference/esql/functions/signature/rtrim.svg @@ -1 +1 @@ -RTRIM(str) \ No newline at end of file +RTRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sin.svg b/docs/reference/esql/functions/signature/sin.svg index eb20f3386d441..2c60f0580f8fb 100644 --- a/docs/reference/esql/functions/signature/sin.svg +++ b/docs/reference/esql/functions/signature/sin.svg @@ -1 +1 @@ -SIN(n) \ No newline at end of file +SIN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sinh.svg b/docs/reference/esql/functions/signature/sinh.svg index 30361aca1fb35..16e7ddb6b6534 100644 --- a/docs/reference/esql/functions/signature/sinh.svg +++ b/docs/reference/esql/functions/signature/sinh.svg @@ -1 +1 @@ -SINH(n) \ No newline at end of file +SINH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/split.svg b/docs/reference/esql/functions/signature/split.svg index 1213f6041b0c4..5d148a750fa8c 100644 --- a/docs/reference/esql/functions/signature/split.svg +++ b/docs/reference/esql/functions/signature/split.svg @@ -1 +1 @@ -SPLIT(str,delim) \ No newline at end of file +SPLIT(string,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sqrt.svg b/docs/reference/esql/functions/signature/sqrt.svg index 77c657120735f..8816e55429550 100644 --- a/docs/reference/esql/functions/signature/sqrt.svg +++ b/docs/reference/esql/functions/signature/sqrt.svg @@ -1 +1 @@ -SQRT(n) \ No 
newline at end of file +SQRT(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/st_intersects.svg b/docs/reference/esql/functions/signature/st_intersects.svg new file mode 100644 index 0000000000000..491ba80aee5e5 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_intersects.svg @@ -0,0 +1 @@ +ST_INTERSECTS(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/substring.svg b/docs/reference/esql/functions/signature/substring.svg index 1f9f6e8c3afa0..41eb89fb2504a 100644 --- a/docs/reference/esql/functions/signature/substring.svg +++ b/docs/reference/esql/functions/signature/substring.svg @@ -1 +1 @@ -SUBSTRING(str,start,length) \ No newline at end of file +SUBSTRING(string,start,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tan.svg b/docs/reference/esql/functions/signature/tan.svg index 8ac6ee37cb52a..c8065b30586cc 100644 --- a/docs/reference/esql/functions/signature/tan.svg +++ b/docs/reference/esql/functions/signature/tan.svg @@ -1 +1 @@ -TAN(n) \ No newline at end of file +TAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tanh.svg b/docs/reference/esql/functions/signature/tanh.svg index dfe167afc5470..c2edfe2d6942f 100644 --- a/docs/reference/esql/functions/signature/tanh.svg +++ b/docs/reference/esql/functions/signature/tanh.svg @@ -1 +1 @@ -TANH(n) \ No newline at end of file +TANH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_boolean.svg b/docs/reference/esql/functions/signature/to_boolean.svg index 43c2aac2bca53..97c86bf1543ee 100644 --- a/docs/reference/esql/functions/signature/to_boolean.svg +++ b/docs/reference/esql/functions/signature/to_boolean.svg @@ -1 +1 @@ -TO_BOOLEAN(v) \ No newline at end of file +TO_BOOLEAN(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_cartesianpoint.svg b/docs/reference/esql/functions/signature/to_cartesianpoint.svg index 44484e8321e2f..0de42490c2d57 100644 --- a/docs/reference/esql/functions/signature/to_cartesianpoint.svg +++ b/docs/reference/esql/functions/signature/to_cartesianpoint.svg @@ -1 +1 @@ -TO_CARTESIANPOINT(v) \ No newline at end of file +TO_CARTESIANPOINT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_cartesianshape.svg b/docs/reference/esql/functions/signature/to_cartesianshape.svg index c16ce9a6c15bc..fdc56831122ee 100644 --- a/docs/reference/esql/functions/signature/to_cartesianshape.svg +++ b/docs/reference/esql/functions/signature/to_cartesianshape.svg @@ -1 +1 @@ -TO_CARTESIANSHAPE(v) \ No newline at end of file +TO_CARTESIANSHAPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_datetime.svg b/docs/reference/esql/functions/signature/to_datetime.svg index eb9e74248471a..cfd2fcd94aaa0 100644 --- a/docs/reference/esql/functions/signature/to_datetime.svg +++ b/docs/reference/esql/functions/signature/to_datetime.svg @@ -1 +1 @@ -TO_DATETIME(v) \ No newline at end of file +TO_DATETIME(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg index 01fe0a4770156..1f323d6f2be6a 100644 --- a/docs/reference/esql/functions/signature/to_degrees.svg +++ b/docs/reference/esql/functions/signature/to_degrees.svg @@ -1 +1 @@ -TO_DEGREES(v) \ No newline at end of file +TO_DEGREES(number) \ No newline at 
end of file diff --git a/docs/reference/esql/functions/signature/to_double.svg b/docs/reference/esql/functions/signature/to_double.svg index e785e30ce5f81..1e87a7ea6a4a5 100644 --- a/docs/reference/esql/functions/signature/to_double.svg +++ b/docs/reference/esql/functions/signature/to_double.svg @@ -1 +1 @@ -TO_DOUBLE(v) \ No newline at end of file +TO_DOUBLE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_geopoint.svg b/docs/reference/esql/functions/signature/to_geopoint.svg index 444817aa388b9..e74ea27d1a9f6 100644 --- a/docs/reference/esql/functions/signature/to_geopoint.svg +++ b/docs/reference/esql/functions/signature/to_geopoint.svg @@ -1 +1 @@ -TO_GEOPOINT(v) \ No newline at end of file +TO_GEOPOINT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_geoshape.svg b/docs/reference/esql/functions/signature/to_geoshape.svg index 91b02332ad806..8991592cea2fa 100644 --- a/docs/reference/esql/functions/signature/to_geoshape.svg +++ b/docs/reference/esql/functions/signature/to_geoshape.svg @@ -1 +1 @@ -TO_GEOSHAPE(v) \ No newline at end of file +TO_GEOSHAPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_integer.svg b/docs/reference/esql/functions/signature/to_integer.svg index beb2e94039e53..413acdb2ce6d9 100644 --- a/docs/reference/esql/functions/signature/to_integer.svg +++ b/docs/reference/esql/functions/signature/to_integer.svg @@ -1 +1 @@ -TO_INTEGER(v) \ No newline at end of file +TO_INTEGER(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_ip.svg b/docs/reference/esql/functions/signature/to_ip.svg index c1669c9376c8b..54856be5439b5 100644 --- a/docs/reference/esql/functions/signature/to_ip.svg +++ b/docs/reference/esql/functions/signature/to_ip.svg @@ -1 +1 @@ -TO_IP(v) \ No newline at end of file +TO_IP(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_long.svg b/docs/reference/esql/functions/signature/to_long.svg index 464d4a001cb35..ffe94435ebc04 100644 --- a/docs/reference/esql/functions/signature/to_long.svg +++ b/docs/reference/esql/functions/signature/to_long.svg @@ -1 +1 @@ -TO_LONG(v) \ No newline at end of file +TO_LONG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_radians.svg b/docs/reference/esql/functions/signature/to_radians.svg index 712431fb32497..8388ad682df3c 100644 --- a/docs/reference/esql/functions/signature/to_radians.svg +++ b/docs/reference/esql/functions/signature/to_radians.svg @@ -1 +1 @@ -TO_RADIANS(v) \ No newline at end of file +TO_RADIANS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_string.svg b/docs/reference/esql/functions/signature/to_string.svg index 72fc676289d64..e7a167348c5a0 100644 --- a/docs/reference/esql/functions/signature/to_string.svg +++ b/docs/reference/esql/functions/signature/to_string.svg @@ -1 +1 @@ -TO_STRING(v) \ No newline at end of file +TO_STRING(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_unsigned_long.svg b/docs/reference/esql/functions/signature/to_unsigned_long.svg index da07b3a4c7349..c19873a0c4015 100644 --- a/docs/reference/esql/functions/signature/to_unsigned_long.svg +++ b/docs/reference/esql/functions/signature/to_unsigned_long.svg @@ -1 +1 @@ -TO_UNSIGNED_LONG(v) \ No newline at end of file +TO_UNSIGNED_LONG(field) \ No newline at end of file diff --git 
a/docs/reference/esql/functions/signature/to_version.svg b/docs/reference/esql/functions/signature/to_version.svg index e6337280c2e8d..3266c0b476ae6 100644 --- a/docs/reference/esql/functions/signature/to_version.svg +++ b/docs/reference/esql/functions/signature/to_version.svg @@ -1 +1 @@ -TO_VERSION(v) \ No newline at end of file +TO_VERSION(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/trim.svg b/docs/reference/esql/functions/signature/trim.svg index 5fc865d306f11..9d16d660b57ed 100644 --- a/docs/reference/esql/functions/signature/trim.svg +++ b/docs/reference/esql/functions/signature/trim.svg @@ -1 +1 @@ -TRIM(str) \ No newline at end of file +TRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/sin.asciidoc b/docs/reference/esql/functions/sin.asciidoc index e6a8e0cf9331f..6034a695c6071 100644 --- a/docs/reference/esql/functions/sin.asciidoc +++ b/docs/reference/esql/functions/sin.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Sine_and_cosine[Sine] trigonometric function. Input expected in radians. -*Supported types* - include::types/sin.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc index 683ae6962c2fd..0931b9a2b88e1 100644 --- a/docs/reference/esql/functions/sinh.asciidoc +++ b/docs/reference/esql/functions/sinh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Hyperbolic_functions[Sine] hyperbolic function. -*Supported types* - include::types/sinh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index d99fe36191a31..c1758f61de723 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -8,9 +8,11 @@ {esql} supports these spatial functions: // tag::spatial_list[] +* <> * <> * <> // end::spatial_list[] +include::st_intersects.asciidoc[] include::st_x.asciidoc[] include::st_y.asciidoc[] diff --git a/docs/reference/esql/functions/split.asciidoc b/docs/reference/esql/functions/split.asciidoc index 0a4ce584d01da..972085ad36cc6 100644 --- a/docs/reference/esql/functions/split.asciidoc +++ b/docs/reference/esql/functions/split.asciidoc @@ -17,8 +17,6 @@ Delimiter. Only single byte delimiters are currently supported. Splits a single valued string into multiple strings. -*Supported types* - include::types/split.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/sqrt.asciidoc b/docs/reference/esql/functions/sqrt.asciidoc index faf504a6b0af4..e43d01e6cf814 100644 --- a/docs/reference/esql/functions/sqrt.asciidoc +++ b/docs/reference/esql/functions/sqrt.asciidoc @@ -20,8 +20,6 @@ return value is always a double. Square roots of negative numbers are NaN. Square roots of infinites are infinite. 
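As a small sketch of the split and square-root behaviour described in these hunks (the input row is invented):

[source,esql]
----
ROW csv = "foo;bar;baz", n = 2.0
| EVAL parts = SPLIT(csv, ";"),   // ["foo", "bar", "baz"], single-byte delimiter
       root  = SQRT(n),
       nan   = SQRT(-1.0)         // NaN, per the note on negative inputs
----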
-*Supported types* - include::types/sqrt.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc new file mode 100644 index 0000000000000..1bf4cef0e2977 --- /dev/null +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -0,0 +1,40 @@ +[discrete] +[[esql-st_intersects]] +=== `ST_INTERSECTS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +*Description* + +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). +In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ + +*Supported types* + +include::types/st_intersects.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-results] +|=== diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc index 0f40a66417f9f..692373f054d99 100644 --- a/docs/reference/esql/functions/st_x.asciidoc +++ b/docs/reference/esql/functions/st_x.asciidoc @@ -17,8 +17,6 @@ Expression of type `geo_point` or `cartesian_point`. If `null`, the function ret Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. -*Supported types* - include::types/st_x.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc index e876852228d83..dba9b3d450006 100644 --- a/docs/reference/esql/functions/st_y.asciidoc +++ b/docs/reference/esql/functions/st_y.asciidoc @@ -17,8 +17,6 @@ Expression of type `geo_point` or `cartesian_point`. If `null`, the function ret Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. -*Supported types* - include::types/st_y.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/starts_with.asciidoc b/docs/reference/esql/functions/starts_with.asciidoc index 4d45e89882400..6fbd6ca1f18e6 100644 --- a/docs/reference/esql/functions/starts_with.asciidoc +++ b/docs/reference/esql/functions/starts_with.asciidoc @@ -20,8 +20,6 @@ String expression. If `null`, the function returns `null`. Returns a boolean that indicates whether a keyword string starts with another string. 
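A minimal usage sketch for `ST_INTERSECTS`, assuming an index such as the `airports` test dataset with a `geo_point` column named `location` (both names are assumptions here, not taken from this patch):

[source,esql]
----
FROM airports
| WHERE ST_INTERSECTS(location,
    TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))
| KEEP location
----

Both arguments must use the same coordinate system, so a `geo_point` column is paired here with a `geo_shape` literal rather than a `cartesian_*` value.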
-*Supported types* - include::types/starts_with.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index e9fe04ce15761..b568ae1061bb5 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -23,7 +23,7 @@ // end::string_list[] include::concat.asciidoc[] -include::left.asciidoc[] +include::layout/left.asciidoc[] include::length.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] diff --git a/docs/reference/esql/functions/substring.asciidoc b/docs/reference/esql/functions/substring.asciidoc index 73df7a19aa6b7..82d1c361aa749 100644 --- a/docs/reference/esql/functions/substring.asciidoc +++ b/docs/reference/esql/functions/substring.asciidoc @@ -24,8 +24,6 @@ positions after `start` are returned. Returns a substring of a string, specified by a start position and an optional length. -*Supported types* - include::types/substring.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc index cc06421616fc1..3b1c446806733 100644 --- a/docs/reference/esql/functions/tan.asciidoc +++ b/docs/reference/esql/functions/tan.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Sine_and_cosine[Tangent] trigonometric function. Input expected in radians. -*Supported types* - include::types/tan.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc index a21354d23ba50..9b47c68c19cf1 100644 --- a/docs/reference/esql/functions/tanh.asciidoc +++ b/docs/reference/esql/functions/tanh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function. -*Supported types* - include::types/tanh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/to_cartesianpoint.asciidoc index 223556d2c0e96..bb534d67f2754 100644 --- a/docs/reference/esql/functions/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/to_cartesianpoint.asciidoc @@ -21,8 +21,6 @@ Converts an input value to a `point` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. -*Supported types* - include::types/to_cartesianpoint.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_cartesianshape.asciidoc b/docs/reference/esql/functions/to_cartesianshape.asciidoc index 287d437b3906c..a7ad5351498a2 100644 --- a/docs/reference/esql/functions/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/to_cartesianshape.asciidoc @@ -22,8 +22,6 @@ Converts an input value to a `cartesian_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -*Supported types* - include::types/to_cartesianshape.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_geopoint.asciidoc b/docs/reference/esql/functions/to_geopoint.asciidoc index d4d7d397d8f7b..29514a24161d7 100644 --- a/docs/reference/esql/functions/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/to_geopoint.asciidoc @@ -19,8 +19,6 @@ The input type must be a string or a `geo_point`. Converts an input value to a `geo_point` value. 
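An illustrative row-level sketch of the substring and WKT conversion behaviour covered above (the sample string and WKT point are invented):

[source,esql]
----
ROW s = "elasticsearch", wkt = "POINT(4297.11 -1475.53)"
| EVAL prefix = SUBSTRING(s, 1, 7),   // "elastic", start position is 1-based
       suffix = SUBSTRING(s, -6),     // "search", length is optional
       pt     = TO_CARTESIANPOINT(wkt)
----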
-*Supported types* - include::types/to_geopoint.asciidoc[] A string will only be successfully converted if it respects the diff --git a/docs/reference/esql/functions/to_geoshape.asciidoc b/docs/reference/esql/functions/to_geoshape.asciidoc index 8a6ec978dc7bf..2964e3c02fdf0 100644 --- a/docs/reference/esql/functions/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/to_geoshape.asciidoc @@ -22,8 +22,6 @@ Converts an input value to a `geo_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -*Supported types* - include::types/to_geoshape.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_lower.asciidoc b/docs/reference/esql/functions/to_lower.asciidoc index 5b98d82c9a94f..165b59528b43b 100644 --- a/docs/reference/esql/functions/to_lower.asciidoc +++ b/docs/reference/esql/functions/to_lower.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Returns a new string representing the input string converted to lower case. -*Supported types* - include::types/to_lower.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_string.asciidoc b/docs/reference/esql/functions/to_string.asciidoc index e771915977d97..be0ba3583c5da 100644 --- a/docs/reference/esql/functions/to_string.asciidoc +++ b/docs/reference/esql/functions/to_string.asciidoc @@ -18,8 +18,6 @@ Input value. The input can be a single- or multi-valued column or an expression. Converts an input value into a string. -*Supported types* - include::types/to_string.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_upper.asciidoc b/docs/reference/esql/functions/to_upper.asciidoc index cea63bcbb4bb0..282c6aa65e691 100644 --- a/docs/reference/esql/functions/to_upper.asciidoc +++ b/docs/reference/esql/functions/to_upper.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Returns a new string representing the input string converted to upper case. -*Supported types* - include::types/to_upper.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_version.asciidoc b/docs/reference/esql/functions/to_version.asciidoc index 6a1583889c87f..878f57f604933 100644 --- a/docs/reference/esql/functions/to_version.asciidoc +++ b/docs/reference/esql/functions/to_version.asciidoc @@ -20,8 +20,6 @@ Input value. The input can be a single- or multi-valued column or an expression. Converts an input string to a version value. -*Supported types* - include::types/to_version.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/trim.asciidoc b/docs/reference/esql/functions/trim.asciidoc index 0b246b7526cd2..341c09d114e88 100644 --- a/docs/reference/esql/functions/trim.asciidoc +++ b/docs/reference/esql/functions/trim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Removes leading and trailing whitespaces from strings. -*Supported types* - include::types/trim.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/types/abs.asciidoc b/docs/reference/esql/functions/types/abs.asciidoc index 54341360fed3f..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/abs.asciidoc +++ b/docs/reference/esql/functions/types/abs.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
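A quick sketch combining the conversion and trimming functions above (the input values are invented):

[source,esql]
----
ROW v = "1.2.3", msg = "  Some Text  "
| EVAL version = TO_VERSION(v),
       upper   = TO_UPPER(TRIM(msg)),   // "SOME TEXT"
       as_text = TO_STRING(3.14)
----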
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/acos.asciidoc b/docs/reference/esql/functions/types/acos.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/acos.asciidoc +++ b/docs/reference/esql/functions/types/acos.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc index 3665c112d802d..a0215a803d4e3 100644 --- a/docs/reference/esql/functions/types/add.asciidoc +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/asin.asciidoc b/docs/reference/esql/functions/types/asin.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/asin.asciidoc +++ b/docs/reference/esql/functions/types/asin.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan.asciidoc b/docs/reference/esql/functions/types/atan.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/atan.asciidoc +++ b/docs/reference/esql/functions/types/atan.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan2.asciidoc b/docs/reference/esql/functions/types/atan2.asciidoc index 74fffe9056a16..d1fffd88a7c3f 100644 --- a/docs/reference/esql/functions/types/atan2.asciidoc +++ b/docs/reference/esql/functions/types/atan2.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -y | x | result +y_coordinate | x_coordinate | result double | double | double double | integer | double double | long | double diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index e0ede29e40df1..535e2df29c353 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== field | buckets | from | to | result diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 3bf3d8ad3d713..44acf331a43dc 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -1,5 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -condition | rest | result +condition | trueValue | result |=== diff --git a/docs/reference/esql/functions/types/ceil.asciidoc b/docs/reference/esql/functions/types/ceil.asciidoc index 54341360fed3f..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/ceil.asciidoc +++ b/docs/reference/esql/functions/types/ceil.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 2daf6126d6fb0..97ac47c2bb505 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -expression | expressionX | result +first | rest | result boolean | boolean | boolean integer | integer | integer keyword | keyword | keyword diff --git a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index 1f14abf9c498f..16fbd6918c40f 100644 --- a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -first | rest | result +string1 | string2 | result keyword | keyword | keyword text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc index b4e5c6ad5e0b5..98adcef51e75c 100644 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== unit | startTimestamp | endTimestamp | result diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index edd244548fb18..43702ef0671a7 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -1,5 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -date_part | field | result +datePart | date | result keyword | datetime | long +text | datetime | long |=== diff --git a/docs/reference/esql/functions/types/date_format.asciidoc b/docs/reference/esql/functions/types/date_format.asciidoc new file mode 100644 index 0000000000000..a76f38653b9b8 --- /dev/null +++ b/docs/reference/esql/functions/types/date_format.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +dateFormat | date | result +keyword | datetime | keyword +text | datetime | keyword +|=== diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index f4922b9bf9c61..82ae8253baa26 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -1,6 +1,11 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== datePattern | dateString | result keyword | keyword | datetime keyword | text | datetime +text | text | datetime |=== diff --git a/docs/reference/esql/functions/types/div.asciidoc b/docs/reference/esql/functions/types/div.asciidoc index eee2d68e4653f..79749dda1bc55 100644 --- a/docs/reference/esql/functions/types/div.asciidoc +++ b/docs/reference/esql/functions/types/div.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/e.asciidoc b/docs/reference/esql/functions/types/e.asciidoc index 5854465d5fb49..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/e.asciidoc +++ b/docs/reference/esql/functions/types/e.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/ends_with.asciidoc b/docs/reference/esql/functions/types/ends_with.asciidoc index 88489185b41f7..a0236634bbf01 100644 --- a/docs/reference/esql/functions/types/ends_with.asciidoc +++ b/docs/reference/esql/functions/types/ends_with.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | suffix | result diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/floor.asciidoc b/docs/reference/esql/functions/types/floor.asciidoc index 54341360fed3f..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/floor.asciidoc +++ b/docs/reference/esql/functions/types/floor.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 0e4ebb2d45a31..2a14b6280aa0a 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest | result diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 0e4ebb2d45a31..2a14b6280aa0a 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest | result diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index 6899a408969f7..157f61b59316d 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | length | result +string | length | result keyword | integer | keyword text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index de84fe63c794a..db5a48c7c4390 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | integer text | integer |=== diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/log.asciidoc b/docs/reference/esql/functions/types/log.asciidoc index d72ea848c349f..0a59e51e45c72 100644 --- a/docs/reference/esql/functions/types/log.asciidoc +++ b/docs/reference/esql/functions/types/log.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -base | value | result +base | number | result double | double | double double | integer | double double | long | double diff --git a/docs/reference/esql/functions/types/log10.asciidoc b/docs/reference/esql/functions/types/log10.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/log10.asciidoc +++ b/docs/reference/esql/functions/types/log10.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/ltrim.asciidoc b/docs/reference/esql/functions/types/ltrim.asciidoc index 26f4e7633d8ae..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/ltrim.asciidoc +++ b/docs/reference/esql/functions/types/ltrim.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/docs/reference/esql/functions/types/mod.asciidoc b/docs/reference/esql/functions/types/mod.asciidoc index eee2d68e4653f..79749dda1bc55 100644 --- a/docs/reference/esql/functions/types/mod.asciidoc +++ b/docs/reference/esql/functions/types/mod.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc index 2f5100b1d1494..188dae5a50982 100644 --- a/docs/reference/esql/functions/types/mul.asciidoc +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index 0bba9b341c301..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -field | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index e3ea8b0830f47..e5c8f8eeb6bd7 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | delim | result +string | delim | result keyword | keyword | keyword keyword | text | keyword text | keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index a2e7119bab05d..8af6b76591acb 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | integer cartesian_point | integer cartesian_shape | integer diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index dc1175ccdd951..705745d76dbab 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 620c7cf13b771..e077c57971a4a 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 620c7cf13b771..e077c57971a4a 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index 1a9a1bee08388..4e5f0a5e0ae89 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index 4bb9cf6c7a1cb..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 1a9a1bee08388..4e5f0a5e0ae89 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 1891fed3631e9..568de10f53d32 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | start | end | result +field | start | end | result boolean | integer | integer | boolean cartesian_point | integer | integer | cartesian_point cartesian_shape | integer | integer | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index 01416cdd71ae6..24925ca8a6587 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== field | order | result diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index 4bb9cf6c7a1cb..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_zip.asciidoc b/docs/reference/esql/functions/types/mv_zip.asciidoc index 6ee6c29c77264..514041202a1d5 100644 --- a/docs/reference/esql/functions/types/mv_zip.asciidoc +++ b/docs/reference/esql/functions/types/mv_zip.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -mvLeft | mvRight | delim | result +string1 | string2 | delim | result keyword | keyword | keyword | keyword text | text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/neg.asciidoc b/docs/reference/esql/functions/types/neg.asciidoc index 1b841483fb22e..28d3b2a512dec 100644 --- a/docs/reference/esql/functions/types/neg.asciidoc +++ b/docs/reference/esql/functions/types/neg.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index 27fb19b6d38a2..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/pi.asciidoc b/docs/reference/esql/functions/types/pi.asciidoc index 5854465d5fb49..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/pi.asciidoc +++ b/docs/reference/esql/functions/types/pi.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/pow.asciidoc b/docs/reference/esql/functions/types/pow.asciidoc index 0e22c123ebf53..3753fd2317bc5 100644 --- a/docs/reference/esql/functions/types/pow.asciidoc +++ b/docs/reference/esql/functions/types/pow.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== base | exponent | result diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 8c2be37bd63a0..3401c04bbe395 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | regex | newStr | result +string | regex | newString | result keyword | keyword | keyword | keyword keyword | keyword | text | keyword keyword | text | keyword | keyword diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index 6899a408969f7..157f61b59316d 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | length | result +string | length | result keyword | integer | keyword text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 33e89c91f0bfe..ea9ab93825d4f 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -1,5 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -value | decimals | result +number | decimals | result double | integer | double |=== diff --git a/docs/reference/esql/functions/types/rtrim.asciidoc b/docs/reference/esql/functions/types/rtrim.asciidoc index 26f4e7633d8ae..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/rtrim.asciidoc +++ b/docs/reference/esql/functions/types/rtrim.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index 4b5e6856c8fe2..8a69a25bd0c48 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | delim | result +string | delim | result keyword | keyword | keyword text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/sqrt.asciidoc b/docs/reference/esql/functions/types/sqrt.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sqrt.asciidoc +++ b/docs/reference/esql/functions/types/sqrt.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/st_intersects.asciidoc b/docs/reference/esql/functions/types/st_intersects.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_intersects.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/docs/reference/esql/functions/types/st_x.asciidoc b/docs/reference/esql/functions/types/st_x.asciidoc index 94ed4b296f1d4..31cbd98042f1b 100644 --- a/docs/reference/esql/functions/types/st_x.asciidoc +++ b/docs/reference/esql/functions/types/st_x.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== point | result diff --git a/docs/reference/esql/functions/types/st_y.asciidoc b/docs/reference/esql/functions/types/st_y.asciidoc index 94ed4b296f1d4..31cbd98042f1b 100644 --- a/docs/reference/esql/functions/types/st_y.asciidoc +++ b/docs/reference/esql/functions/types/st_y.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== point | result diff --git a/docs/reference/esql/functions/types/starts_with.asciidoc b/docs/reference/esql/functions/types/starts_with.asciidoc index 863ddef3c0361..a6c77014966b4 100644 --- a/docs/reference/esql/functions/types/starts_with.asciidoc +++ b/docs/reference/esql/functions/types/starts_with.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | prefix | result diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index 826c4f6274652..c439830b7d1e3 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index f12a40c9253fb..7837c14b5a956 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | start | length | result +string | start | length | result keyword | integer | integer | keyword text | integer | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index 1df8dd6526f18..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tau.asciidoc b/docs/reference/esql/functions/types/tau.asciidoc index 5854465d5fb49..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/tau.asciidoc +++ b/docs/reference/esql/functions/types/tau.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc index 7f543963eb090..2ab34243db67d 100644 --- a/docs/reference/esql/functions/types/to_boolean.asciidoc +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean double | boolean integer | boolean diff --git a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc index 081d879c4b713..52339abb70512 100644 --- a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result cartesian_point | cartesian_point keyword | cartesian_point text | cartesian_point diff --git a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc index 258a31169782d..bc42ddcb99221 100644 --- a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result cartesian_point | cartesian_shape cartesian_shape | cartesian_shape keyword | cartesian_shape diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index bbd755f81f4da..52c4cebb661cf 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result datetime | datetime double | datetime integer | datetime diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc index 7cb7ca46022c2..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/to_degrees.asciidoc +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc index 38e8482b77544..c78c3974af5a5 100644 --- a/docs/reference/esql/functions/types/to_double.asciidoc +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | double datetime | double double | double diff --git a/docs/reference/esql/functions/types/to_geopoint.asciidoc b/docs/reference/esql/functions/types/to_geopoint.asciidoc index c464aec9e983c..6b833c4cfeabd 100644 --- a/docs/reference/esql/functions/types/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/types/to_geopoint.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result geo_point | geo_point keyword | geo_point text | geo_point diff --git a/docs/reference/esql/functions/types/to_geoshape.asciidoc b/docs/reference/esql/functions/types/to_geoshape.asciidoc index 5fc8611ee2f92..98063e2766e88 100644 --- a/docs/reference/esql/functions/types/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/types/to_geoshape.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result geo_point | geo_shape geo_shape | geo_shape keyword | geo_shape diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc index bcea15b9ec80b..11fd7914c5b0f 100644 --- a/docs/reference/esql/functions/types/to_integer.asciidoc +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | integer datetime | integer double | integer diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index 6d7f9338a9aeb..d2f94889b81ef 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result ip | ip keyword | ip text | ip diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index 307f573f1db2d..4bc927fd94697 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | long datetime | long double | long diff --git a/docs/reference/esql/functions/types/to_lower.asciidoc b/docs/reference/esql/functions/types/to_lower.asciidoc index 26f4e7633d8ae..974066d225bca 100644 --- a/docs/reference/esql/functions/types/to_lower.asciidoc +++ b/docs/reference/esql/functions/types/to_lower.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc index 7cb7ca46022c2..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/to_radians.asciidoc +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 773e396f41373..f14cfbb39929f 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | keyword cartesian_point | keyword cartesian_shape | keyword diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc index 76d9cf44f4dd2..a271e1a19321d 100644 --- a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | unsigned_long datetime | unsigned_long double | unsigned_long diff --git a/docs/reference/esql/functions/types/to_upper.asciidoc b/docs/reference/esql/functions/types/to_upper.asciidoc index 26f4e7633d8ae..974066d225bca 100644 --- a/docs/reference/esql/functions/types/to_upper.asciidoc +++ b/docs/reference/esql/functions/types/to_upper.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/to_version.asciidoc b/docs/reference/esql/functions/types/to_version.asciidoc index ebb83f03a6fe6..f95ea15d6e2b4 100644 --- a/docs/reference/esql/functions/types/to_version.asciidoc +++ b/docs/reference/esql/functions/types/to_version.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result keyword | version text | version version | version diff --git a/docs/reference/esql/functions/types/trim.asciidoc b/docs/reference/esql/functions/types/trim.asciidoc index 26f4e7633d8ae..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/trim.asciidoc +++ b/docs/reference/esql/functions/types/trim.asciidoc @@ -1,6 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/docs/reference/esql/functions/values.asciidoc b/docs/reference/esql/functions/values.asciidoc new file mode 100644 index 0000000000000..9707180058e24 --- /dev/null +++ b/docs/reference/esql/functions/values.asciidoc @@ -0,0 +1,38 @@ +[discrete] +[[esql-agg-values]] +=== `VALUES` + +preview::["Do not use `VALUES` on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +*Syntax* + +[source,esql] +---- +VALUES(expression) +---- + +`expression`:: +Expression of any type except `geo_point`, `cartesian_point`, or `geo_shape`. + +*Description* + +Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. +If you need the values returned in order use <>. + +WARNING: This can use a significant amount of memory and ES|QL doesn't yet + grow aggregations beyond memory. So this aggregation will work until + it is used to collect more values than can fit into memory. Once it + collects too many values it will fail the query with + a <>. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=values-grouped] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=values-grouped-result] +|=== + diff --git a/docs/reference/esql/source-commands/show.asciidoc b/docs/reference/esql/source-commands/show.asciidoc index 1913c60660b93..298ea5d8f92b9 100644 --- a/docs/reference/esql/source-commands/show.asciidoc +++ b/docs/reference/esql/source-commands/show.asciidoc @@ -12,7 +12,7 @@ SHOW item *Parameters* `item`:: -Can be `INFO` or experimental:[] `FUNCTIONS`. +Can only be `INFO`. *Description* @@ -20,16 +20,15 @@ The `SHOW` source command returns information about the deployment and its capabilities: * Use `SHOW INFO` to return the deployment's version, build date and hash. -* Use experimental:[] `SHOW FUNCTIONS` to return a list of all supported functions and a -synopsis of each function. 
*Examples* -[source.merge.styled,esql] +[source,esql] ---- -include::{esql-specs}/show.csv-spec[tag=showFunctionsFiltered] +SHOW INFO ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/show.csv-spec[tag=showFunctionsFiltered-result] + version | date | hash +8.13.0 |2024-02-23T10:04:18.123117961Z|04ba8c8db2507501c88f215e475de7b0798cb3b3 |=== diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 850b4ef1b10b0..5b693f51d65da 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -16,23 +16,25 @@ own model, use the <>. [[delete-inference-api-request]] ==== {api-request-title} -`DELETE /_inference/` -`DELETE /_inference//` +`DELETE /_inference/` + +`DELETE /_inference//` [discrete] [[delete-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. +* Requires the `manage_inference` <> +(the built-in `inference_admin` role grants this privilege) [discrete] [[delete-inference-api-path-params]] ==== {api-path-parms-title} -:: +:: (Required, string) -The unique identifier of the {infer} model to delete. +The unique identifier of the {infer} endpoint to delete. :: (Optional, string) diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 176909bc5458f..1a11904a169ca 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -18,17 +18,18 @@ own model, use the <>. `GET /_inference/_all` -`GET /_inference/` +`GET /_inference/` `GET /_inference//_all` -`GET /_inference//` +`GET /_inference//` [discrete] [[get-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. +* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) [discrete] [[get-inference-api-desc]] @@ -46,9 +47,9 @@ and a wildcard expression, [[get-inference-api-path-params]] ==== {api-path-parms-title} -``:: +``:: (Optional, string) -The unique identifier of the {infer} model. +The unique identifier of the {infer} endpoint. ``:: @@ -76,7 +77,7 @@ The API returns the following response: [source,console-result] ------------------------------------------------------------ { - "model_id": "my-elser-model", + "inference_id": "my-elser-model", "task_type": "sparse_embedding", "service": "elser", "service_settings": { diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 4fb6ea5a4fb6d..e4cbd26904271 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -16,23 +16,24 @@ own model, use the <>. [[post-inference-api-request]] ==== {api-request-title} -`POST /_inference/` -`POST /_inference//` +`POST /_inference/` + +`POST /_inference//` [discrete] [[post-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. - +* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) [discrete] [[post-inference-api-desc]] ==== {api-description-title} -The perform {infer} API enables you to use {infer} models to perform specific -tasks on data that you provide as an input. The API returns a response with the +The perform {infer} API enables you to use {ml} models to perform specific tasks +on data that you provide as an input. 
The API returns a response with the resutls of the tasks. The {infer} model you use can perform one specific task that has been defined when the model was created with the <>. @@ -41,9 +42,9 @@ that has been defined when the model was created with the <>. [[post-inference-api-path-params]] ==== {api-path-parms-title} -``:: +``:: (Required, string) -The unique identifier of the {infer} model. +The unique identifier of the {infer} endpoint. ``:: diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 2c0d4d38548bb..c0b9d508e13c3 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -25,14 +25,15 @@ or if you want to use non-NLP models, use the <>. [[put-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. +* Requires the `manage_inference` <> +(the built-in `inference_admin` role grants this privilege) [discrete] [[put-inference-api-desc]] ==== {api-description-title} -The create {infer} API enables you to create and configure an {infer} model to +The create {infer} API enables you to create and configure a {ml} model to perform a specific {infer} task. The following services are available through the {infer} API: @@ -49,9 +50,9 @@ The following services are available through the {infer} API: ==== {api-path-parms-title} -``:: +``:: (Required, string) -The unique identifier of the model. +The unique identifier of the {infer} endpoint. ``:: (Required, string) @@ -100,6 +101,7 @@ the same name and the updated API key. (Optional, string) Specifies the types of embeddings you want to get back. Defaults to `float`. Valid values are: + * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). * `float`: use it for the default float embeddings. * `int8`: use it for signed int8 embeddings. @@ -161,6 +163,12 @@ creating the {infer} model, you cannot change the associated API key. If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. +`model_id`::: +(Optional, string) +The name of the model to use for the {infer} task. Refer to the +https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] +for the list of available text embedding models. + `organization_id`::: (Optional, string) The unique identifier of your organization. You can find the Organization ID in @@ -214,13 +222,6 @@ Valid values are: * `search`: use it for storing embeddings of search queries run against a vector data base to find relevant documents. -`model`::: -(Optional, string) -For `openai` sevice only. The name of the model to use for the {infer} task. Refer -to the -https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] -for the list of available text embedding models. - `truncate`::: (Optional, string) For `cohere` service only. Specifies how the API handles inputs longer than the @@ -245,7 +246,7 @@ This section contains example API calls for every service type. [[inference-example-cohere]] ===== Cohere service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `cohere_embeddings` to perform a `text_embedding` task type. 
[source,console] @@ -255,10 +256,8 @@ PUT _inference/text_embedding/cohere-embeddings "service": "cohere", "service_settings": { "api_key": "", - "model": "embed-english-light-v3.0", - "embedding_type": "int8" - }, - "task_settings": { + "model_id": "embed-english-light-v3.0", + "embedding_type": "byte" } } ------------------------------------------------------------ @@ -269,7 +268,7 @@ PUT _inference/text_embedding/cohere-embeddings [[inference-example-e5]] ===== E5 via the elasticsearch service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-e5-model` to perform a `text_embedding` task type. [source,console] @@ -294,7 +293,7 @@ further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. [[inference-example-elser]] ===== ELSER service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. [source,console] @@ -305,8 +304,7 @@ PUT _inference/sparse_embedding/my-elser-model "service_settings": { "num_allocations": 1, "num_threads": 1 - }, - "task_settings": {} + } } ------------------------------------------------------------ // TEST[skip:TBD] @@ -317,7 +315,7 @@ Example response: [source,console-result] ------------------------------------------------------------ { - "model_id": "my-elser-model", + "inference_id": "my-elser-model", "task_type": "sparse_embedding", "service": "elser", "service_settings": { @@ -334,7 +332,7 @@ Example response: [[inference-example-hugging-face]] ===== Hugging Face service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `hugging-face_embeddings` to perform a `text_embedding` task type. [source,console] @@ -364,7 +362,7 @@ after the endpoint initialization has been finished. [[inference-example-eland]] ===== Models uploaded by Eland via the elasticsearch service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-msmarco-minilm-model` to perform a `text_embedding` task type. [source,console] @@ -389,7 +387,7 @@ been [[inference-example-openai]] ===== OpenAI service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `openai_embeddings` to perform a `text_embedding` task type. [source,console] @@ -398,10 +396,8 @@ PUT _inference/text_embedding/openai_embeddings { "service": "openai", "service_settings": { - "api_key": "" - }, - "task_settings": { - "model": "text-embedding-ada-002" + "api_key": "", + "model_id": "text-embedding-ada-002" } } ------------------------------------------------------------ diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index fff736712529a..7ee1face25339 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -75,6 +75,7 @@ markup. Used for identifying named entities. <>:: Used for auto-complete suggestions. <>:: `text`-like type for as-you-type completion. +<>:: <>:: A count of tokens in a text. 
@@ -178,6 +179,8 @@ include::types/rank-features.asciidoc[] include::types/search-as-you-type.asciidoc[] +include::types/semantic-text.asciidoc[] + include::types/shape.asciidoc[] include::types/sparse-vector.asciidoc[] diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc new file mode 100644 index 0000000000000..51d7693db12aa --- /dev/null +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -0,0 +1,8 @@ +[role="xpack"] +[[semantic-text]] +=== Semantic text field type +++++ +Semantic text +++++ + +The documentation page for the `semantic_text` field type. \ No newline at end of file diff --git a/docs/reference/mapping/types/text.asciidoc b/docs/reference/mapping/types/text.asciidoc index 2a6e2f3ef8ae8..c33af69df5607 100644 --- a/docs/reference/mapping/types/text.asciidoc +++ b/docs/reference/mapping/types/text.asciidoc @@ -133,8 +133,11 @@ The following parameters are accepted by `text` fields: <>:: Whether the field value should be stored and retrievable separately from - the <> field. Accepts `true` or `false` - (default). + the <> field. Accepts `true` or `false` (default). + This parameter will be automatically set to `true` for TSDB indices + (indices that have `index.mode` set to `time_series`) + if there is no <> + sub-field that supports synthetic `_source`. <>:: diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c2f431da388f1..c9e726d940b1d 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,5 +16,119 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -There are no breaking changes in {es} 8.13. +The following changes in {es} 8.13 might affect your applications +and prevent them from operating normally. +Before upgrading to 8.13, review these changes and take the described steps +to mitigate the impact. + + +There are no notable breaking changes in {es} 8.13. +There are, however, some less critical breaking changes. + +[discrete] +[[breaking_813_index_setting_changes]] +==== Index setting changes + +[[change_index_look_ahead_time_index_settings_default_value_from_2_hours_to_30_minutes]] +.Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. +[%collapsible] +==== +*Details* + +Change the `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. + +*Impact* + +Documents with a @timestamp of 30 minutes or more in the future will be rejected. Previously, documents with a @timestamp of 2 hours or more in the future were rejected. To keep the previous behaviour, update the `index.look_ahead_time` setting to two hours before performing the upgrade. +==== + +[[lower_look_ahead_time_index_settings_max_value]] +.Lower the `look_ahead_time` index setting's max value +[%collapsible] +==== +*Details* + +Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. + +*Impact* + +Any value between 2 hours and 7 days will be treated as a look ahead time of 2 hours. +==== + +[discrete] +[[breaking_813_rest_api_changes]] +==== REST API changes + +[[esql_grammar_from_metadata_no_longer_requires]] +.ESQL: Grammar - FROM METADATA no longer requires [] +[%collapsible] +==== +*Details* + +Remove the [ ] around the METADATA option inside FROM command statements. + +*Impact* + +Previously, to return metadata fields, one had to use square brackets (eg. 'FROM index [METADATA _index]').
This is no longer needed: the [ ] have been dropped and do not have to be specified, simplifying the command above to: 'FROM index METADATA _index'. +==== + +[[es_ql_remove_project_keyword_from_grammar]] +.ES|QL: remove PROJECT keyword from the grammar +[%collapsible] +==== +*Details* + +Removes the PROJECT keyword (an alias for KEEP) from the ES|QL grammar. + +*Impact* + +Before this change, users could use PROJECT as an alias for KEEP in ESQL queries (eg. 'FROM idx | PROJECT name, surname'); the parser replaced PROJECT with KEEP, emitted the warning 'PROJECT command is no longer supported, please use KEEP instead', and executed the query normally. With this change, the PROJECT command is no longer recognized by the query parser; queries using the PROJECT command now return a parsing exception. +==== + +[[esql_remove_nan_finite_infinite]] +.[ESQL] Remove is_nan, is_finite, and `is_infinite` +[%collapsible] +==== +*Details* + +Removes the functions `is_nan`, `is_finite`, and `is_infinite`. + +*Impact* + +Attempting to use the above functions will now result in a planner-time error. These functions are no longer supported. +==== + + +[discrete] +[[deprecated-8.13]] +=== Deprecations + +The following functionality has been deprecated in {es} 8.13 +and will be removed in a future version. +While this won't have an immediate impact on your applications, +we strongly encourage you to take the described steps to update your code +after upgrading to 8.13. + +To find out if you are using any deprecated functionality, +enable <>. + +[discrete] +[[deprecations_813_cluster_and_node_setting]] +==== Cluster and node setting deprecations + +[[deprecate_client_type]] +.Deprecate `client.type` +[%collapsible] +==== +*Details* + +The node setting `client.type` has been ignored since the node client was removed in 8.0. The setting is now deprecated and will be removed in a future release. + +*Impact* + +Remove the `client.type` setting from `elasticsearch.yml`. +==== + +[discrete] +[[deprecations_813_rest_api]] +==== REST API deprecations + +[[desirednode_deprecate_node_version_field_make_it_optional_for_current_version]] +.`DesiredNode:` deprecate `node_version` field and make it optional for the current version +[%collapsible] +==== +*Details* + +The desired_node API includes a `node_version` field to validate the version of new nodes. This kind of check is too broad, and it's better done by external logic, so it has been removed, making the `node_version` field unnecessary. The field will be removed in a later version. + +*Impact* + +Users should update their usages of `desired_node` so that they no longer include the `node_version` field.
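+For illustration only, an update that omits the deprecated field might look like the following sketch; the history ID, version number, and node specification below are placeholder values, not a prescribed configuration:
+
+[source,console]
+----
+// A sketch with placeholder history ID, version, and node values
+PUT /_internal/desired_nodes/my-history/2
+{
+  "nodes": [
+    {
+      "settings": {
+        "node.name": "instance-000187",
+        "node.roles": ["master", "data_hot"]
+      },
+      "processors": 8.0,
+      "memory": "58gb",
+      "storage": "2tb"
+    }
+  ]
+}
+----
+// TEST[skip:TBD]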
+==== diff --git a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc index f55ab207a2689..b904508d20f3d 100644 --- a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc @@ -54,19 +54,17 @@ image::images/ml-annotations.png["Delayed data annotations in the Single Metric [IMPORTANT] ==== -As the `doc_count` from an aggregation is compared with the -bucket results of the job, the delayed data check will not work correctly in the -following cases: +The delayed data check will not work correctly in the following cases: -* if the datafeed uses aggregations and the job's `analysis_config` does not have its -`summary_count_field_name` set to `doc_count`, -* if the datafeed is _not_ using aggregations and `summary_count_field_name` is set to -any value. +* if the {dfeed} uses aggregations that filter data, +* if the {dfeed} uses aggregations and the job's `analysis_config` does not have +its `summary_count_field_name` set to `doc_count`, +* if the {dfeed} is _not_ using aggregations and `summary_count_field_name` is +set to any value. -If the datafeed is using aggregations then it's highly likely that the job's -`summary_count_field_name` should be set to `doc_count`. If -`summary_count_field_name` is set to any value other than `doc_count`, the -delayed data check for the datafeed must be disabled. +If the datafeed is using aggregations, set the job's `summary_count_field_name` +to `doc_count`. If `summary_count_field_name` is set to any value other than +`doc_count`, the delayed data check for the datafeed must be disabled. ==== There is another tool for visualizing the delayed data on the *Annotations* tab in the {anomaly-detect} job management page: diff --git a/docs/reference/modules/discovery/discovery.asciidoc b/docs/reference/modules/discovery/discovery.asciidoc index a665a401ebab2..2311fa738fc1a 100644 --- a/docs/reference/modules/discovery/discovery.asciidoc +++ b/docs/reference/modules/discovery/discovery.asciidoc @@ -115,7 +115,7 @@ supplied in `unicast_hosts.txt`. The `unicast_hosts.txt` file contains one node entry per line. Each node entry consists of the host (host name or IP address) and an optional transport port -number. If the port number is specified, is must come immediately after the +number. If the port number is specified, it must come immediately after the host (on the same line) separated by a `:`. If the port number is not specified, {es} will implicitly use the first port in the port range given by `transport.profiles.default.port`, or by `transport.port` if diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index ec60b2bca37e4..8a42d11f6367a 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -68,8 +68,8 @@ A node that has the `master` role, which makes it eligible to be <>:: -A node that has the `data` role. Data nodes hold data and perform data -related operations such as CRUD, search, and aggregations. A node with the `data` role can fill any of the specialised data node roles. +A node that has one of several data roles. Data nodes hold data and perform data +related operations such as CRUD, search, and aggregations. A node with a generic `data` role can fill any of the specialized data node roles. 
<>:: @@ -220,7 +220,7 @@ therefore ensure that the storage and networking available to the nodes in your cluster are good enough to meet your performance goals. [[data-node]] -==== Data node +==== Data nodes Data nodes hold the shards that contain the documents you have indexed. Data nodes handle data related operations like CRUD, search, and aggregations. @@ -230,20 +230,27 @@ monitor these resources and to add more data nodes if they are overloaded. The main benefit of having dedicated data nodes is the separation of the master and data roles. -To create a dedicated data node, set: +In a multi-tier deployment architecture, you use specialized data roles to +assign data nodes to specific tiers: `data_content`,`data_hot`, `data_warm`, +`data_cold`, or `data_frozen`. A node can belong to multiple tiers. + +If you want to include a node in all tiers, or if your cluster does not use multiple tiers, then you can use the generic `data` role. + +WARNING: If you assign a node to a specific tier using a specialized data role, then you shouldn't also assign it the generic `data` role. The generic `data` role takes precedence over specialized data roles. + +[[generic-data-node]] +===== Generic data node + +Generic data nodes are included in all content tiers. + +To create a dedicated generic data node, set: [source,yaml] ---- node.roles: [ data ] ---- -In a multi-tier deployment architecture, you use specialized data roles to -assign data nodes to specific tiers: `data_content`,`data_hot`, `data_warm`, -`data_cold`, or `data_frozen`. A node can belong to multiple tiers, but a node -that has one of the specialized data roles cannot have the generic `data` role. - -[role="xpack"] [[data-content-node]] -==== Content data node +===== Content data node Content data nodes are part of the content tier. include::{es-repo-dir}/datatiers.asciidoc[tag=content-tier] @@ -254,9 +261,8 @@ To create a dedicated content node, set: node.roles: [ data_content ] ---- -[role="xpack"] [[data-hot-node]] -==== Hot data node +===== Hot data node Hot data nodes are part of the hot tier. include::{es-repo-dir}/datatiers.asciidoc[tag=hot-tier] @@ -267,9 +273,8 @@ To create a dedicated hot node, set: node.roles: [ data_hot ] ---- -[role="xpack"] [[data-warm-node]] -==== Warm data node +===== Warm data node Warm data nodes are part of the warm tier. include::{es-repo-dir}/datatiers.asciidoc[tag=warm-tier] @@ -280,9 +285,8 @@ To create a dedicated warm node, set: node.roles: [ data_warm ] ---- -[role="xpack"] [[data-cold-node]] -==== Cold data node +===== Cold data node Cold data nodes are part of the cold tier. include::{es-repo-dir}/datatiers.asciidoc[tag=cold-tier] @@ -293,9 +297,8 @@ To create a dedicated cold node, set: node.roles: [ data_cold ] ---- -[role="xpack"] [[data-frozen-node]] -==== Frozen data node +===== Frozen data node Frozen data nodes are part of the frozen tier. include::{es-repo-dir}/datatiers.asciidoc[tag=frozen-tier] diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 5b7d4f90f98de..2ef183374f167 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -1,8 +1,443 @@ [[release-notes-8.13.0]] == {es} version 8.13.0 -coming[8.13.0] - Also see <>. 
+[[breaking-8.13.0]] +[float] +=== Breaking changes + +ES|QL:: +* ESQL: Grammar - FROM METADATA no longer requires [] {es-pull}105221[#105221] +* ES|QL: remove PROJECT keyword from the grammar {es-pull}105064[#105064] +* [ESQL] Remove is_nan, is_finite, and `is_infinite` {es-pull}104091[#104091] + +TSDB:: +* Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. {es-pull}103898[#103898] +* Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. {es-pull}103434[#103434] + +[[bug-8.13.0]] +[float] +=== Bug fixes + +Aggregations:: +* Disable parallel collection for terms aggregation with `min_doc_count` equals to 0 {es-pull}106156[#106156] +* `GlobalOrdCardinalityAggregator` should use `HyperLogLogPlusPlus` instead of `HyperLogLogPlusPlusSparse` {es-pull}105546[#105546] + +Allocation:: +* Fix disk computation when initializing new shards {es-pull}102879[#102879] +* Fix disk computation when initializing unassigned shards in desired balance computation {es-pull}102207[#102207] + +Application:: +* Fix Search Applications bug where deleting an alias before deleting an application intermittently caused errors {es-pull}106329[#106329] +* Use search to determine if cluster contains data {es-pull}103920[#103920] +* [Connector API] Bugfix: support list type in filtering advenced snippet value {es-pull}105633[#105633] +* [Connector API] Fix default ordering in `SyncJob` list endpoint {es-pull}105945[#105945] +* [Connector API] Fix serialisation of script params in connector index service {es-pull}106060[#106060] + +Authentication:: +* Execute SAML authentication on the generic threadpool {es-pull}105232[#105232] (issue: {es-issue}104962[#104962]) + +Authorization:: +* Adjust interception of requests for specific shard IDs {es-pull}101656[#101656] + +Client:: +* Validate settings in `ReloadSecureSettings` API {es-pull}103176[#103176] + +Data streams:: +* Apm-data: fix `@custom` component templates {es-pull}104182[#104182] +* Avoid false-positive matches on intermediate objects in `ecs@mappings` {es-pull}105440[#105440] (issue: {es-issue}102794[#102794]) +* Execute lazy rollover with an internal dedicated user #104732 {es-pull}104905[#104905] (issue: {es-issue}104732[#104732]) +* Fix write index resolution when an alias is pointing to a TSDS {es-pull}104440[#104440] (issue: {es-issue}104189[#104189]) +* x-pack/plugin/core: add `match_mapping_type` to `ecs@mappings` dynamic templates {es-pull}103035[#103035] + +Distributed:: +* Fix logger Strings.format calls {es-pull}104573[#104573] +* Request indexing memory pressure in APM node metrics publisher {es-pull}103520[#103520] + +ES|QL:: +* ESQL: Add single value checks on LIKE/RLIKE pushdown {es-pull}103807[#103807] (issue: {es-issue}103806[#103806]) +* ESQL: Correct out-of-range filter pushdowns {es-pull}99961[#99961] (issue: {es-issue}99960[#99960]) +* ESQL: Fix Analyzer to not interpret escaped * as a pattern {es-pull}105325[#105325] (issue: {es-issue}104955[#104955]) +* ESQL: Fix a bug loading unindexed text fields {es-pull}104553[#104553] +* ESQL: Fix bug in grammar that allowed spaces inside id pattern {es-pull}105476[#105476] (issue: {es-issue}105441[#105441]) +* ESQL: Fix replacement of nested expressions in aggs with multiple parameters {es-pull}104718[#104718] (issue: {es-issue}104706[#104706]) +* ESQL: Fix wrong attribute shadowing in pushdown rules {es-pull}105650[#105650] (issue: {es-issue}105434[#105434]) +* ESQL: Improve pushdown of certain filters {es-pull}103538[#103538] (issue: 
{es-issue}103536[#103536]) +* ESQL: allow `null` in date math {es-pull}103610[#103610] (issue: {es-issue}103085[#103085]) +* ESQL: make `cidr_match` foldable {es-pull}105403[#105403] (issue: {es-issue}105376[#105376]) +* ES|QL: Disable optimizations that rely on Expression.nullable() {es-pull}105691[#105691] +* ES|QL: Improve type validation in aggs for UNSIGNED_LONG better support for VERSION {es-pull}104911[#104911] (issue: {es-issue}102961[#102961]) +* ES|QL: better management of exact subfields for TEXT fields {es-pull}103510[#103510] (issue: {es-issue}99899[#99899]) +* Fix error on sorting unsortable `geo_point` and `cartesian_point` {es-pull}106351[#106351] (issue: {es-issue}106007[#106007]) +* For empty mappings use a `LocalRelation` {es-pull}105081[#105081] (issue: {es-issue}104809[#104809]) +* Resume driver when failing to fetch pages {es-pull}106392[#106392] (issue: {es-issue}106262[#106262]) +* Review KEEP logic to prevent duplicate column names {es-pull}103316[#103316] +* `ProjectOperator` should not retain references to released blocks {es-pull}105848[#105848] + +Engine:: +* Consider currently refreshing data in the memory usage of refresh {es-pull}104122[#104122] +* Release `TranslogSnapshot` buffer after iteration {es-pull}106398[#106398] (issue: {es-issue}106390[#106390]) + +Health:: +* Make Health API more resilient to multi-version clusters {es-pull}105789[#105789] (issue: {es-issue}90183[#90183]) +* Stop the periodic health logger when es is stopping {es-pull}105272[#105272] + +ILM+SLM:: +* Remove `hashCode` and `equals` from `OperationModeUpdateTask` {es-pull}104265[#104265] (issue: {es-issue}100871[#100871]) +* [ILM] Delete step deletes data stream with only one index {es-pull}105772[#105772] + +Indices APIs:: +* Fix `require_alias` implicit true value on presence {es-pull}104099[#104099] (issue: {es-issue}103945[#103945]) + +Infra/CLI:: +* Fix server cli to always pass through exit code {es-pull}104943[#104943] + +Infra/Core:: +* Do not enable APM agent 'instrument', it's not required for manual tracing {es-pull}105055[#105055] +* Fix bogus assertion tripped by force-executed tasks {es-pull}104581[#104581] (issue: {es-issue}104580[#104580]) +* Metrics: Allow `AsyncCounters` to switch providers {es-pull}103025[#103025] +* Metrics: Handle null observations in observers {es-pull}103091[#103091] + +Infra/Node Lifecycle:: +* Close rather than stop `HttpServerTransport` on shutdown {es-pull}102759[#102759] (issue: {es-issue}102501[#102501]) + +Ingest Node:: +* Add stable `ThreadPool` constructor to `LogstashInternalBridge` {es-pull}105163[#105163] +* Adding `executedPipelines` to the `IngestDocument` copy constructor {es-pull}105427[#105427] +* Revert "x-pack/plugin/apm-data: download geoip DB on pipeline creation" {es-pull}104505[#104505] +* X-pack/plugin/apm-data: fix `@custom` pipeline support {es-pull}104113[#104113] + +Machine Learning:: +* Allow GET inference models by user a with read only permission {es-pull}105346[#105346] +* Avoid computing `currentInferenceProcessors` on every cluster state {es-pull}106057[#106057] +* Catch all the potential exceptions in the ingest processor code {es-pull}105391[#105391] +* Changed system auditor to use levels {es-pull}105429[#105429] +* During ML maintenance, reset jobs in the reset state without a corresponding task {es-pull}106062[#106062] +* Fix `categorize_text` aggregation nested under empty buckets {es-pull}105987[#105987] (issue: {es-issue}105836[#105836]) +* Fix resetting a job if the original reset task no longer 
exists. {es-pull}106020[#106020] +* Retry updates to model snapshot ID on job config {es-pull}104077[#104077] +* The OpenAI model parameter should be in service settings not task settings. Move the configuration field to service settings {es-pull}105458[#105458] +* Undeploy elser when inference model deleted {es-pull}104230[#104230] + +Mapping:: +* Fix parsing of flattened fields within subobjects: false {es-pull}105373[#105373] + +Network:: +* Fix use-after-free at event-loop shutdown {es-pull}105486[#105486] + +Search:: +* Correct profiled rewrite time for knn with a pre-filter {es-pull}104150[#104150] +* Force execution of `SearchService.Reaper` {es-pull}106544[#106544] (issue: {es-issue}106543[#106543]) +* Move `TransportTermsEnumAction` coordination off transport threads {es-pull}104408[#104408] +* Remove `SearchException` usages without a proper status code {es-pull}105150[#105150] +* Require the name field for `inner_hits` for collapse {es-pull}104666[#104666] +* add validation on _id field when upsert new doc {es-pull}103399[#103399] (issue: {es-issue}102981[#102981]) + +Security:: +* Revert "Validate settings in `ReloadSecureSettings` API" {es-pull}103310[#103310] + +Snapshot/Restore:: +* Do not record s3 http request time when it is not available {es-pull}105103[#105103] +* `URLRepository` should not block shutdown {es-pull}105588[#105588] + +TLS:: +* Respect --pass option in certutil csr mode {es-pull}106105[#106105] + +Transform:: +* Fix `_reset` API when called with `force=true` on a failed transform {es-pull}106574[#106574] (issue: {es-issue}106573[#106573]) +* Fix a bug where destination index aliases are not set up for an unattended transform {es-pull}105499[#105499] +* Remove duplicate checkpoint audits {es-pull}105164[#105164] (issue: {es-issue}105106[#105106]) +* Return results in order {es-pull}105089[#105089] (issue: {es-issue}104847[#104847]) +* Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` {es-pull}103682[#103682] (issue: {es-issue}103115[#103115]) + +Vector Search:: +* Fix bug when nested knn pre-filter might match nested docs {es-pull}105994[#105994] + +Watcher:: +* Handling exceptions on watcher reload {es-pull}105442[#105442] (issue: {es-issue}69842[#69842]) + +[[deprecation-8.13.0]] +[float] +=== Deprecations + +Distributed:: +* `DesiredNode:` deprecate `node_version` field and make it optional (unused) in current parser {es-pull}104209[#104209] + +Infra/Core:: +* Deprecate `client.type` {es-pull}104574[#104574] + +[[enhancement-8.13.0]] +[float] +=== Enhancements + +Aggregations:: +* Add index mapping parameter for `counted_keyword` {es-pull}103646[#103646] +* Introduce an `AggregatorReducer` to reduce the footprint of aggregations in the coordinating node {es-pull}105207[#105207] +* Release resources in `BestBucketsDeferringCollector` earlier {es-pull}104893[#104893] +* Support sampling in `counted_terms` aggregation {es-pull}103846[#103846] + +Allocation:: +* Account for reserved disk size {es-pull}103903[#103903] +* Derive expected replica size from primary {es-pull}102078[#102078] + +Application:: +* Add serverless scopes for Connector APIs {es-pull}104063[#104063] +* [Connector API] Change required privileges to indices:data/read(write) {es-pull}105289[#105289] +* [Connector API] Implement update `index_name` action {es-pull}104648[#104648] +* [Connector API] Support filtering by name, index name in list action {es-pull}105131[#105131] +* [Connector API] Support filtering connectors by service type and a query 
{es-pull}105178[#105178] +* [Connector API] Support updating configuration values only {es-pull}105249[#105249] +* [Connectors API] Add new field `api_key_secret_id` to Connector {es-pull}104982[#104982] +* [Connectors API] Implement connector status update action {es-pull}104750[#104750] +* [Connectors API] Implement update native action endpoint {es-pull}104654[#104654] +* [Connectors API] Implement update service type action {es-pull}104643[#104643] +* [Connectors API] Relax strict response parsing for get/list operations {es-pull}104909[#104909] +* [Profiling] Extract properties faster from source {es-pull}104356[#104356] +* [Profiling] Mark all templates as managed {es-pull}103783[#103783] +* [Profiling] Speed up processing of stacktraces {es-pull}104674[#104674] +* [Profiling] Support downsampling of generic events {es-pull}104730[#104730] +* [Profiling] Use shard request cache consistently {es-pull}103643[#103643] + +Authentication:: +* Expose API key authentication metrics {es-pull}103178[#103178] +* Expose realms authentication metrics {es-pull}104200[#104200] +* Expose service account authentication metrics {es-pull}104043[#104043] +* Expose token authentication metrics {es-pull}104142[#104142] +* Hot-reloadable LDAP bind password {es-pull}104320[#104320] +* Support of `match` for the Query API Key API {es-pull}104594[#104594] + +Authorization:: +* [Security Solution] Allow write permission for `kibana_system` role on endpoint response index {es-pull}103555[#103555] + +CRUD:: +* Avoid wrapping searchers multiple times in mget {es-pull}104227[#104227] (issue: {es-issue}85069[#85069]) + +Client:: +* Add rest spec for Query User API {es-pull}104529[#104529] + +Cluster Coordination:: +* Add troubleshooting docs link to `PeerFinder` logs {es-pull}104787[#104787] +* Report current master in `PeerFinder` {es-pull}104396[#104396] + +Data streams:: +* Introduce lazy rollover for mapping updates in data streams {es-pull}103309[#103309] (issue: {es-issue}89346[#89346]) +* Use new `ignore_dynamic_beyond_limit` in logs and metric data streams {es-pull}105180[#105180] +* X-pack/plugin/apm-data: add dynamic setting for enabling template registry {es-pull}104386[#104386] (issue: {es-issue}104385[#104385]) +* X-pack/plugin/core: rename `double_metrics` template {es-pull}103033[#103033] +* x-pack/plugin/apm-data: Add a new field transaction.profiler_stack_trace_ids to traces-apm@mappings.yaml {es-pull}105223[#105223] +* x-pack/plugin/apm-data: Map some APM fields as flattened and fix error.grouping_name script {es-pull}103032[#103032] +* x-pack/plugin/core: make automatic rollovers lazy {es-pull}105273[#105273] (issue: {es-issue}104083[#104083]) + +Discovery-Plugins:: +* Set read timeout for fetching IMDSv2 token {es-pull}104407[#104407] (issue: {es-issue}104244[#104244]) + +Downsampling:: +* Support patch transport version from 8.12 {es-pull}104406[#104406] + +ES|QL:: +* Add ES|QL async delete API {es-pull}103628[#103628] +* Avoid humongous blocks {es-pull}103340[#103340] +* ESQL: Add TO_UPPER and TO_LOWER functions {es-pull}104309[#104309] +* ESQL: Add option to drop null fields {es-pull}102428[#102428] +* ESQL: Add plan consistency verification after each optimizer {es-pull}105371[#105371] +* ESQL: Check field exists before load from `_source` {es-pull}103632[#103632] +* ESQL: Delay finding field load infrastructure {es-pull}103821[#103821] +* ESQL: Expand shallow copy with vecs {es-pull}103681[#103681] (issue: {es-issue}100528[#100528]) +* ESQL: Extend STATS command to support aggregate 
expressions {es-pull}104958[#104958] +* ESQL: Infer not null for aggregated fields {es-pull}103673[#103673] (issue: {es-issue}102787[#102787]) +* ESQL: Nested expressions inside stats command {es-pull}104387[#104387] (issue: {es-issue}99828[#99828]) +* ESQL: Pre-allocate rows in TopNOperator {es-pull}104796[#104796] +* ESQL: Referencing expressions that contain backticks requires <>. {es-pull}100740[#100740] (issue: {es-issue}100312[#100312]) +* ESQL: Simpify IS NULL/IS NOT NULL evaluation {es-pull}103099[#103099] (issue: {es-issue}103097[#103097]) +* ESQL: Speed up reading many nulls {es-pull}105088[#105088] +* ESQL: Support loading shapes from source into WKB blocks {es-pull}104269[#104269] +* ESQL: Track the rest of `DocVector` {es-pull}103727[#103727] +* ESQL: `MV_FIRST` and `MV_LAST` {es-pull}103928[#103928] +* ESQL: add `date_diff` function {es-pull}104118[#104118] (issue: {es-issue}101942[#101942]) +* ESQL: push down "[text_field] is not null" {es-pull}105593[#105593] +* ES|QL Async Query API {es-pull}103398[#103398] +* Prepare enrich plan to support multi clusters {es-pull}104355[#104355] +* Reading points from source to reduce precision loss {es-pull}103698[#103698] +* Remove deprecated Block APIs {es-pull}103592[#103592] +* Reserve bytes before serializing page {es-pull}105269[#105269] +* Support ST_CENTROID over spatial points {es-pull}104218[#104218] (issue: {es-issue}104656[#104656]) +* Support cross clusters query in ESQL {es-pull}101640[#101640] +* Support enrich ANY mode in cross clusters query {es-pull}104840[#104840] +* Support enrich coordinator mode {es-pull}104936[#104936] +* Support enrich remote mode {es-pull}104993[#104993] + +Geo:: +* Add support for Well Known Binary (WKB) in the fields API for spatial fields {es-pull}103461[#103461] +* Add the possibility to transform WKT to WKB directly {es-pull}104030[#104030] + +Health:: +* Add APM metrics to `HealthPeriodicLogger` {es-pull}102765[#102765] +* Extend `repository_integrity` health indicator for unknown and invalid repos {es-pull}104614[#104614] (issue: {es-issue}103784[#103784]) + +ILM+SLM:: +* Add "step":"ERROR" to ILM explain response for missing policy {es-pull}103720[#103720] (issue: {es-issue}99030[#99030]) +* Add default rollover conditions to ILM explain API response {es-pull}104721[#104721] (issue: {es-issue}103395[#103395]) +* ILM/SLM history policies forcemerge in hot and dsl configuration {es-pull}103190[#103190] + +Infra/CLI:: +* Add replay diagnostic dir to system jvm options {es-pull}103535[#103535] + +Infra/Circuit Breakers:: +* Lower G1 minimum full GC interval {es-pull}105259[#105259] + +Infra/Core:: +* Adding threadpool metrics {es-pull}102371[#102371] +* ES - document observing with rejections {es-pull}104859[#104859] +* Thread pool metrics {es-pull}104500[#104500] + +Infra/Metrics:: +* Modify name of threadpool metric for rejected {es-pull}105015[#105015] + +Infra/Node Lifecycle:: +* Wait for async searches to finish when shutting down {es-pull}101487[#101487] + +Infra/Transport API:: +* Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return `ParentTaskAssigningClient` {es-pull}100813[#100813] + +Ingest Node:: +* Adding `ActionRequestLazyBuilder` implementation of `RequestBuilder` {es-pull}104927[#104927] +* Adding a `RequestBuilder` interface {es-pull}104778[#104778] +* Adding a custom exception for problems with the graph of pipelines to be applied to a document {es-pull}105196[#105196] +* Improving the performance of the ingest simulate verbose API 
{es-pull}105265[#105265] +* Ingest geoip processor cache 'no results' from the database {es-pull}104092[#104092] +* Limiting the number of nested pipelines that can be executed {es-pull}105428[#105428] +* Modifying request builders {es-pull}104636[#104636] + +Java Low Level REST Client:: +* Set thread name used by REST client {es-pull}103160[#103160] + +Machine Learning:: +* Add optional pruning configuration (weighted terms scoring) to text expansion query {es-pull}102862[#102862] +* Add text_embedding inference service with multilingual-e5 and custom eland models {es-pull}104949[#104949] +* Add 3 automatic restarts for `pytorch_inference` processes that stop unexpectedly {es-pull}104433[#104433] +* Add support for Cohere inference service {es-pull}104559[#104559] +* Always test for spikes and dips as well as changes in the change point aggregation {es-pull}103922[#103922] +* Apply windowing and chunking to long documents {es-pull}104363[#104363] +* Automatically download the ELSER model when PUT in `_inference` {es-pull}104334[#104334] +* Better handling of number of allocations in pytorch_inference in the case that hardware_concurrency fails {ml-pull}2607[#2607] +* Change detection aggregation improvements {es-pull}102824[#102824] +* Conditionally send the dimensions field as part of the openai requests {es-pull}105299[#105299] (issue: {es-issue}105005[#105005]) +* Endpoint to find positions of Grok pattern matches {es-pull}104394[#104394] +* Ensure unique IDs between inference models and trained model deployments {es-pull}103996[#103996] +* Expose some ML metrics via APM {es-pull}102584[#102584] +* Make `task_type` optional in `_inference` APIs {es-pull}104483[#104483] +* Update `missingTrainedModel` message to include: you may need to create it {es-pull}104155[#104155] +* Upgrade MKL to version 2024.0 on Linux x86_64 {ml-pull}2619[#2619] +* Upgrade PyTorch to version 2.1.2. 
{ml-pull}2588[#2588] +* Upgrade zlib to version 1.2.13 on Windows {ml-pull}2588[#2588] +* Use Boost.JSON for JSON processing {ml-pull}2614[#2614] +* Validate inference model ids {es-pull}103669[#103669] + + +Mapping:: +* Add `index.mapping.total_fields.ignore_dynamic_beyond_limit` setting to ignore dynamic fields when field limit is reached {es-pull}96235[#96235] +* Make field limit more predictable {es-pull}102885[#102885] + +Network:: +* Prune unnecessary information from TransportNodesStatsAction.NodeStatsRequest {es-pull}102559[#102559] (issue: {es-issue}100878[#100878]) + +Percolator:: +* Return `matched_queries` in Percolator {es-pull}103084[#103084] (issue: {es-issue}10163[#10163]) + +Query Languages:: +* Introduce Alias.unwrap method {es-pull}104575[#104575] + +Search:: +* Dyamically adjust node metrics cache expire {es-pull}104460[#104460] +* Enhancement: Metrics for Search Took Times using Action Listeners {es-pull}104996[#104996] +* Field caps performance pt2 {es-pull}105941[#105941] +* Field-caps field has value lookup use map instead of looping array {es-pull}105770[#105770] +* Flag in `_field_caps` to return only fields with values in index {es-pull}103651[#103651] +* Include better output in profiling & `toString` for automaton based queries {es-pull}105468[#105468] +* Metrics for search latencies {es-pull}102557[#102557] +* Ref count search response bytes {es-pull}103763[#103763] (issue: {es-issue}102657[#102657]) +* Remove leniency in msearch parsing {es-pull}103232[#103232] +* Resolve Cluster API {es-pull}102726[#102726] +* Reuse number field mapper tests in other modules {es-pull}99142[#99142] (issue: {es-issue}92947[#92947]) +* S3 first byte latency metric {es-pull}102435[#102435] +* Update s3 latency metric to use micros {es-pull}103633[#103633] +* Upgrade to Lucene 9.10.0 {es-pull}105578[#105578] + +Security:: +* Add Query Users API {es-pull}104033[#104033] +* Add `ApiKey` expiration time to audit log {es-pull}103959[#103959] +* Add expiration time to update api key api {es-pull}103453[#103453] +* Add stricter validation for api key expiration time {es-pull}103973[#103973] +* Add support for the `simple_query_string` to the Query API Key API {es-pull}104132[#104132] +* Add support for the `type` parameter, for sorting, to the Query API Key API {es-pull}104625[#104625] +* Aggs support for Query API Key Information API {es-pull}104895[#104895] +* Hot-reloadable remote cluster credentials {es-pull}102798[#102798] + +Snapshot/Restore:: +* Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] +* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Fix blob cache race, decay, time dependency {es-pull}104784[#104784] +* Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] +* Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] + +Store:: +* List hidden shard stores by default {es-pull}103710[#103710] + +TLS:: +* 'elasticsearch-certutil cert' now verifies the issuing chain of the generated certificate {es-pull}103948[#103948] + +TSDB:: +* Improve storage efficiency for non-metric fields in TSDB {es-pull}99747[#99747] +* Introduce experimental pass-through field type {es-pull}103648[#103648] +* Nest pass-through objects within objects {es-pull}105062[#105062] +* Restrict usage of certain aggregations when in sort order execution is required {es-pull}104665[#104665] +* Small time series agg improvement {es-pull}106288[#106288] + +Transform:: +* Allow transforms to use PIT with remote 
clusters again {es-pull}105192[#105192] (issue: {es-issue}104518[#104518]) +* Transforms: Adding basic stats API param {es-pull}104878[#104878] + +Vector Search:: +* Add new int8_flat and flat vector index types {es-pull}104872[#104872] +* Add support for more than one `inner_hit` when searching nested vectors {es-pull}104006[#104006] +* Making `k` and `num_candidates` optional for knn search {es-pull}101209[#101209] (issue: {es-issue}97533[#97533]) + +[[feature-8.13.0]] +[float] +=== New features + +Data streams:: +* Add `require_data_stream` parameter to indexing requests to enforce indexing operations target a data stream {es-pull}101872[#101872] (issue: {es-issue}97032[#97032]) +* Redirect failed ingest node operations to a failure store when available {es-pull}103481[#103481] + +ES|QL:: +* ESQL: Introduce mode setting for ENRICH {es-pull}103949[#103949] +* ESQL: add =~ operator (case insensitive equality) {es-pull}103656[#103656] + +Health:: +* Create a DSL health indicator as part of the health API {es-pull}103130[#103130] + +Infra/Core:: +* Add gradle tasks and code to modify and access mappings between version ids and release versions {es-pull}103627[#103627] + +Mapping:: +* Add `unmatch_mapping_type`, and support array of types {es-pull}103171[#103171] (issues: {es-issue}102807[#102807], {es-issue}102795[#102795]) + +Search:: +* Added Duplicate Word Check Feature to Analysis Nori {es-pull}103325[#103325] (issue: {es-issue}103321[#103321]) +* [Synonyms] Mark Synonyms as GA {es-pull}103223[#103223] + +[[upgrade-8.13.0]] +[float] +=== Upgrades + +Query Languages:: +* Upgrade ANTLR4 to 4.13.1 {es-pull}105334[#105334] (issue: {es-issue}102953[#102953]) + +Search:: +* Upgrade to Lucene 9.9.0 {es-pull}102782[#102782] +* Upgrade to Lucene 9.9.1 {es-pull}103387[#103387] +* Upgrade to Lucene 9.9.2 {es-pull}104753[#104753] + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 92cd447a48deb..25096779521e4 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -62,6 +62,16 @@ fields that don't have a value. This can be done through the newly added {es-pull}103651[#103651] +[discrete] +[[new_lucene_9_10_release]] +=== New Lucene 9.10 release +- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. +- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search +- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. +- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. + +{es-pull}105578[#105578] + // end::notable-highlights[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 6395c8800bb39..fa0d3babb3a0c 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -44,6 +44,7 @@ not be included yet. 
* <> * <> * <> +* <> * <> * <> * <> @@ -93,6 +94,7 @@ include::{es-repo-dir}/query-rules/apis/index.asciidoc[] include::{es-repo-dir}/indices/apis/reload-analyzers.asciidoc[] include::{es-repo-dir}/repositories-metering-api/repositories-metering-apis.asciidoc[] include::{es-repo-dir}/rollup/rollup-apis.asciidoc[] +include::{es-repo-dir}/rest-api/root.asciidoc[] include::{es-repo-dir}/scripting/apis/script-apis.asciidoc[] include::{es-repo-dir}/search.asciidoc[] include::{es-repo-dir}/search-application/apis/index.asciidoc[] diff --git a/docs/reference/rest-api/root.asciidoc b/docs/reference/rest-api/root.asciidoc new file mode 100644 index 0000000000000..8821981c2afe3 --- /dev/null +++ b/docs/reference/rest-api/root.asciidoc @@ -0,0 +1,117 @@ +[[rest-api-root]] +== Root API +++++ +Root API +++++ + +The Elasticsearch API's base URL returns its basic build, +version, and cluster information. + +[source,console] +-------------------------------------------------- +GET / +-------------------------------------------------- + +[discrete] +[[rest-api-root-prereq]] +=== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the +`monitor`, `manage`, or `all` +<> to use this API. + +[role="child_attributes"] +[discrete] +[[rest-api-root-response-body]] +=== {api-response-body-title} + + +`name` :: +The responding <>. + +`cluster_name` :: +The responding <>. + +`cluster_uuid` :: +The responding cluster's `uuid` as confirmed by +<>. + +`version` :: +(object) +Contains information about the running version of Elasticsearch. ++ +.Properties of `version` +[%collapsible%open] +==== +`number` :: +Version number of the responding +https://www.elastic.co/downloads/past-releases#elasticsearch[Elasticsearch release]. + +`build_flavor` :: +Build flavor, e.g. `default`. + +`build_type` :: +Build type corresponding to how +<>, +e.g. `docker`, `rpm`, `tar`. + +`build_hash` :: +The SHA hash of Elasticsearch's Git commit. + +`build_date` :: +The date of Elasticsearch's Git commit. + +`build_snapshot` :: +Whether Elasticsearch's build is from a snapshot. + +`lucene_version` :: +Version number of Elasticsearch's +<>. + +`minimum_wire_compatibility_version` :: +Minimum node version with which the responding node can +communicate. Also the minimum version from which you can perform +a <>. + +`minimum_index_compatibility_version` :: +Minimum index version with which the responding node can read +from disk.
+==== + +[discrete] +[[rest-api-root-response-example]] +=== {api-examples-title} + +The API returns the following response: + +[source,console-result] +---- +{ + "name": "instance-0000000000", + "cluster_name": "my_test_cluster", + "cluster_uuid": "5QaxoN0pRZuOmWSxstBBwQ", + "version": { + "build_date": "2024-02-01T13:07:13.727175297Z", + "minimum_wire_compatibility_version": "7.17.0", + "build_hash": "6185ba65d27469afabc9bc951cded6c17c21e3f3", + "number": "8.12.1", + "lucene_version": "9.9.2", + "minimum_index_compatibility_version": "7.0.0", + "build_flavor": "default", + "build_snapshot": false, + "build_type": "docker" + }, + "tagline": "You Know, for Search" +} +---- +// TESTRESPONSE[s/"name": "instance-0000000000"/"name": "$body.name"/] +// TESTRESPONSE[s/"cluster_name": "my_test_cluster"/"cluster_name": "$body.cluster_name"/] +// TESTRESPONSE[s/"cluster_uuid": "5QaxoN0pRZuOmWSxstBBwQ"/"cluster_uuid": "$body.cluster_uuid"/] +// TESTRESPONSE[s/"build_date": "2024-02-01T13:07:13.727175297Z"/"build_date": "$body.version.build_date"/] +// TESTRESPONSE[s/"minimum_wire_compatibility_version": "7.17.0"/"minimum_wire_compatibility_version": "$body.version.minimum_wire_compatibility_version"/] +// TESTRESPONSE[s/"build_hash": "6185ba65d27469afabc9bc951cded6c17c21e3f3"/"build_hash": "$body.version.build_hash"/] +// TESTRESPONSE[s/"number": "8.12.1"/"number": "$body.version.number"/] +// TESTRESPONSE[s/"lucene_version": "9.9.2"/"lucene_version": "$body.version.lucene_version"/] +// TESTRESPONSE[s/"minimum_index_compatibility_version": "7.0.0"/"minimum_index_compatibility_version": "$body.version.minimum_index_compatibility_version"/] +// TESTRESPONSE[s/"build_flavor": "default"/"build_flavor": "$body.version.build_flavor"/] +// TESTRESPONSE[s/"build_snapshot": false/"build_snapshot": "$body.version.build_snapshot"/] +// TESTRESPONSE[s/"build_type": "docker"/"build_type": "$body.version.build_type"/] diff --git a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc index bd2d21317212b..576a30866dbdf 100644 --- a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc +++ b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc @@ -78,6 +78,7 @@ A successful call returns an object with "cluster" and "index" fields. "manage_enrich", "manage_ilm", "manage_index_templates", + "manage_inference", "manage_ingest_pipelines", "manage_logstash_pipelines", "manage_ml", @@ -99,6 +100,7 @@ A successful call returns an object with "cluster" and "index" fields. 
"monitor", "monitor_data_frame_transforms", "monitor_enrich", + "monitor_inference", "monitor_ml", "monitor_rollup", "monitor_snapshot", diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 6bf7f2139680b..bbbd73a0523f3 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -228,7 +228,8 @@ GET /_xpack/usage "keep" : 0, "enrich" : 0, "from" : 0, - "row" : 0 + "row" : 0, + "meta" : 0 }, "queries" : { "rest" : { diff --git a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc index bf51042d6adec..5af3c997251dd 100644 --- a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc +++ b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc @@ -1,22 +1,24 @@ -[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] +[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] |==== -| 16+^h| Remote cluster version +| 17+^h| Remote cluster version h| Local cluster version - | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 -| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| 
{no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} -|==== \ No newline at end of file + | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 | 8.6 | 8.7 | 8.8 | 8.9 | 8.10 | 8.11 | 8.12 | 8.13 +| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} +|==== + diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 0bee9533cd358..c3eefec86e6f3 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -135,6 +135,11 @@ a list of relevant text passages. All unique passages, along with their IDs, have been extracted from that data set and compiled into a https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. +IMPORTANT: The `msmarco-passagetest2019-top1000` dataset was not utilized to +train the model. It is only used in this tutorial as a sample dataset that is +easily accessible for demonstration purposes. You can use a different data set +to test the workflow and become familiar with it. + Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] in the {ml-app} UI. Assign the name `id` to the first column and `content` to @@ -184,10 +189,10 @@ follow the progress. [[text-expansion-query]] ==== Semantic search by using the `text_expansion` query -To perform semantic search, use the `text_expansion` query, and provide the -query text and the ELSER model ID. The example below uses the query text "How to -avoid muscle soreness after running?", the `content_embedding` field contains -the generated ELSER output: +To perform semantic search, use the <>, +and provide the query text and the ELSER model ID. The example below uses the +query text "How to avoid muscle soreness after running?"; the `content_embedding` +field contains the generated ELSER output: [source,console] ---- @@ -258,9 +263,9 @@ tokens from source, refer to <> to learn more. [[text-expansion-compound-query]] ==== Combining semantic search with other queries -You can combine `text_expansion` with other queries in a -<>. For example using a filter clause in a -<> or a full text query which may or may not use the same +You can combine <> with other +queries in a <>.
For example, use a filter clause +in a <> or a full text query with the same (or different) query text as the `text_expansion` query. This enables you to combine the search results from both queries. diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 97a37e34eb116..b5619f8dda7b9 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -23,9 +23,9 @@ include::{es-repo-dir}/tab-widgets/inference-api/infer-api-requirements-widget.a [discrete] [[infer-text-embedding-task]] -==== Create the inference task +==== Create an inference endpoint -Create the {infer} task by using the <>: +Create an {infer} endpoint by using the <>: include::{es-repo-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] diff --git a/docs/reference/security/authentication/oidc-guide.asciidoc b/docs/reference/security/authentication/oidc-guide.asciidoc index c2112b949c540..bbb37853ca3ac 100644 --- a/docs/reference/security/authentication/oidc-guide.asciidoc +++ b/docs/reference/security/authentication/oidc-guide.asciidoc @@ -22,7 +22,7 @@ The OpenID Connect Provider (OP) is the entity in OpenID Connect that is respons authenticating the user and for granting the necessary tokens with the authentication and user information to be consumed by the Relying Parties. -In order for the Elastic Stack to be able use your OpenID Connect Provider for authentication, +In order for the Elastic Stack to be able to use your OpenID Connect Provider for authentication, a trust relationship needs to be established between the OP and the RP. In the OpenID Connect Provider, this means registering the RP as a client. OpenID Connect defines a dynamic client registration protocol but this is usually geared towards real-time client registration and diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index 14808140873fd..6db08b307f193 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -69,6 +69,14 @@ Grants full access to all features in {kib} (including Solutions) and read-only Grants access to manage *all* enrich indices (`.enrich-*`) and *all* operations on ingest pipelines. +[[built-in-roles-inference-admin]] `inference_admin`:: +Provides all of the privileges of the `inference_user` role and the full +use of the {inference} APIs. Grants the `manage_inference` cluster privilege. + +[[built-in-roles-inference-user]] `inference_user`:: +Provides the minimum privileges required to view {inference} configurations +and perform inference. Grants the `monitor_inference` cluster privilege. + [[built-in-roles-ingest-user]] `ingest_admin` :: Grants access to manage *all* index templates and *all* ingest pipeline configurations. + diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc index 69fa086d67673..f6b0ceb2d6ae6 100644 --- a/docs/reference/setup/logging-config.asciidoc +++ b/docs/reference/setup/logging-config.asciidoc @@ -204,6 +204,13 @@ configuration for other reasons. For example, you may want to send logs for a particular logger to another file. However, these use cases are rare. -- +IMPORTANT: {es}'s application logs are intended for humans to read and
Different versions of {es} may report information in these logs in +different ways, perhaps adding extra detail, removing unnecessary information, +formatting the same information in different ways, renaming the logger or +adjusting the log level for specific messages. Do not rely on the contents of +the application logs remaining precisely the same between versions. + [discrete] [[deprecation-logging]] === Deprecation logging diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc index a5a1910e8f8ef..39f37f407926e 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc @@ -28,8 +28,8 @@ PUT _ingest/pipeline/cohere_embeddings ] } -------------------------------------------------- -<1> The name of the inference configuration you created by using the -<>. +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. <2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. @@ -55,8 +55,8 @@ PUT _ingest/pipeline/openai_embeddings ] } -------------------------------------------------- -<1> The name of the inference configuration you created by using the -<>. +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. <2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc index 1e8470471491f..843c351648c63 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc @@ -8,7 +8,7 @@ GET cohere-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "model_id": "cohere_embeddings", + "inference_id": "cohere_embeddings", "model_text": "Muscles in human body" } }, @@ -83,7 +83,7 @@ GET openai-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "model_id": "openai_embeddings", + "inference_id": "openai_embeddings", "model_text": "Calculate fuel cost" } }, diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc index be0319fcf1ec1..dea7511f74566 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -8,14 +8,13 @@ PUT _inference/text_embedding/cohere_embeddings <1> "service_settings": { "api_key": "", <2> "model_id": "embed-english-v3.0", <3> - "embedding_type": "int8" - }, - "task_settings": { + "embedding_type": "byte" } } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The task type is `text_embedding` in the path. +<1> The task type is `text_embedding` in the path and the `inference_id` which +is the unique identifier of the {infer} endpoint is `cohere_embeddings`. <2> The API key of your Cohere account. You can find your API keys in your Cohere dashboard under the https://dashboard.cohere.com/api-keys[API keys section]. 
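Before referencing the endpoint from an ingest pipeline, you can verify that it was created. This is an optional, hedged sketch that assumes the `cohere_embeddings` inference ID from the request above; the Get {infer} API returns the endpoint configuration:

[source,console]
----
GET _inference/text_embedding/cohere_embeddings
----
// TEST[skip:TBD]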
You need to provide @@ -24,6 +23,13 @@ key. <3> The name of the embedding model to use. You can find the list of Cohere embedding models https://docs.cohere.com/reference/embed[here]. +NOTE: When using this model the recommended similarity measure to use in the +`dense_vector` field mapping is `dot_product`. In the case of Cohere models, the +embeddings are normalized to unit length in which case the `dot_product` and +the `cosine` measures are equivalent. + + + // end::cohere[] @@ -37,13 +43,12 @@ PUT _inference/text_embedding/openai_embeddings <1> "service_settings": { "api_key": "", <2> "model_id": "text-embedding-ada-002" <3> - }, - "task_settings": { } } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The task type is `text_embedding` in the path. +<1> The task type is `text_embedding` in the path and the `inference_id` which +is the unique identifier of the {infer} endpoint is `openai_embeddings`. <2> The API key of your OpenAI account. You can find your OpenAI API keys in your OpenAI account under the https://platform.openai.com/api-keys[API keys section]. You need to provide @@ -53,4 +58,9 @@ key. embedding models https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. +NOTE: When using this model the recommended similarity measure to use in the +`dense_vector` field mapping is `dot_product`. In the case of OpenAI models, the +embeddings are normalized to unit length in which case the `dot_product` and +the `cosine` measures are equivalent. + // end::openai[] \ No newline at end of file diff --git a/docs/reference/transform/images/transform-alert-actions.png b/docs/reference/transform/images/transform-alert-actions.png index a78c02fa305cd..e171a5c60cbfb 100644 Binary files a/docs/reference/transform/images/transform-alert-actions.png and b/docs/reference/transform/images/transform-alert-actions.png differ diff --git a/docs/reference/transform/images/transform-check-config.png b/docs/reference/transform/images/transform-check-config.png index 46fad65b5d407..916e7edbb9435 100644 Binary files a/docs/reference/transform/images/transform-check-config.png and b/docs/reference/transform/images/transform-check-config.png differ diff --git a/docs/reference/transform/images/transform-rule.png b/docs/reference/transform/images/transform-rule.png index b2c682cd4ce84..c43dd6c1be929 100644 Binary files a/docs/reference/transform/images/transform-rule.png and b/docs/reference/transform/images/transform-rule.png differ diff --git a/docs/reference/transform/transform-alerts.asciidoc b/docs/reference/transform/transform-alerts.asciidoc index 953018b3b6b02..e3ea82d34ec2e 100644 --- a/docs/reference/transform/transform-alerts.asciidoc +++ b/docs/reference/transform/transform-alerts.asciidoc @@ -18,30 +18,39 @@ refer to You can create {transform} rules under **{stack-manage-app} > {rules-ui}**. -On the *Create rule* window, give a name to the rule and optionally provide +. On the *Create rule* window, give a name to the rule and optionally provide tags. Select the {transform} health rule type: - ++ +-- [role="screenshot"] image::images/transform-rule.png["Creating a transform health rule",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. +-- -Select the {transform} or {transforms} to include. You can also use a special +. Select the {transform} or {transforms} to include. You can also use a special character (`*`) to apply the rule to all your {transforms}. 
{transforms-cap} created after the rule are automatically included. -The following health checks are available and enabled by default: - +. The following health checks are available and enabled by default: ++ +-- _{transform-cap} is not started_:: - Notifies if the corresponding {transforms} is not started or it does not index - any data. The notification message recommends the necessary actions to solve - the error. + Get alerts when a {transform} is not started or is not indexing any data. + The notification message recommends the necessary actions to solve the error. -_Errors in {transform} messages_:: - Notifies if {transform} messages contain errors. +_Unhealthy {transform}_:: + Get alerts when a {transform} has an unhealthy status. + The notification message contains status details and related issues. [role="screenshot"] image::images/transform-check-config.png["Selecting health check",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. +-- + +. Set the check interval, which defines how often to evaluate the rule conditions. + +. In the advanced options, you can change the number of consecutive runs that +must meet the rule conditions before an alert occurs. The default value is 1. As the last step in the rule creation process, define its actions. diff --git a/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc b/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc index ca06ec066c8a8..a1d4f5df9c4f6 100644 --- a/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc +++ b/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc @@ -46,7 +46,7 @@ Some operations such as node restarting, decommissioning, or changing cluster al are disruptive and might require multiple shards to move in order to rebalance the cluster. Shard movement order is not deterministic and mostly determined by the source and target node readiness to move a shard. -While rebalancing is in progress some nodes might appear busier then others. +While rebalancing is in progress some nodes might appear busier than others. When a shard is allocated to an undesired node it uses the resources of the current node instead of the target. This might cause a hotspot (disk or CPU) when multiple shards reside on the current node that have not been diff --git a/docs/reference/watcher/input/search.asciidoc b/docs/reference/watcher/input/search.asciidoc index 9ea7a7d5da728..96ec1e4ad6e5f 100644 --- a/docs/reference/watcher/input/search.asciidoc +++ b/docs/reference/watcher/input/search.asciidoc @@ -194,7 +194,7 @@ accurately. When a search generates a large response, you can use `extract` to select the relevant fields instead of loading the entire response. -| `timeout` | no | 30s | The timeout for waiting for the search api call to return. If no response is +| `timeout` | no | 1m | The timeout for waiting for the search api call to return. If no response is returned within this time, the search input times out and fails. This setting overrides the default search operations timeouts. 
|====== diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8978274e6df95..7e2e781d3ce62 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1694,25 +1694,25 @@ - - - + + + - - + + - - - + + + - - + + - - - + + + diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index d64cd4917707c..e6441136f3d4b 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/libs/native/build.gradle b/libs/native/build.gradle index 150ca9c8e80f3..bfcba01b4783a 100644 --- a/libs/native/build.gradle +++ b/libs/native/build.gradle @@ -28,11 +28,3 @@ dependencies { tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } - -tasks.named('forbiddenApisMain21').configure { - ignoreMissingClasses = true -} - -tasks.named('forbiddenApisMain22').configure { - ignoreMissingClasses = true -} diff --git a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java index e47b17e234705..e987f8042691b 100644 --- a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java +++ b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java @@ -15,7 +15,7 @@ import java.nio.ByteBuffer; class JnaCloseableByteBuffer implements CloseableByteBuffer { - private final Memory memory; + final Memory memory; private final ByteBuffer bufferView; JnaCloseableByteBuffer(int len) { diff --git a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java index f0581633ea969..f2c4a85c8f2bc 100644 --- a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java +++ b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java @@ -10,23 +10,23 @@ import com.sun.jna.Library; import com.sun.jna.Native; +import com.sun.jna.Pointer; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; import org.elasticsearch.nativeaccess.lib.ZstdLibrary; -import java.nio.ByteBuffer; - class JnaZstdLibrary implements ZstdLibrary { private interface NativeFunctions extends Library { long ZSTD_compressBound(int scrLen); - long ZSTD_compress(ByteBuffer dst, int dstLen, ByteBuffer src, int srcLen, int compressionLevel); + long ZSTD_compress(Pointer dst, int dstLen, Pointer src, int srcLen, int compressionLevel); boolean ZSTD_isError(long code); String ZSTD_getErrorName(long code); - long ZSTD_decompress(ByteBuffer dst, int dstLen, ByteBuffer src, int srcLen); + long ZSTD_decompress(Pointer dst, int 
dstLen, Pointer src, int srcLen); } private final NativeFunctions functions; @@ -41,8 +41,18 @@ public long compressBound(int scrLen) { } @Override - public long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel) { - return functions.ZSTD_compress(dst, dst.remaining(), src, src.remaining(), compressionLevel); + public long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel) { + assert dst instanceof JnaCloseableByteBuffer; + assert src instanceof JnaCloseableByteBuffer; + var nativeDst = (JnaCloseableByteBuffer) dst; + var nativeSrc = (JnaCloseableByteBuffer) src; + return functions.ZSTD_compress( + nativeDst.memory.share(dst.buffer().position()), + dst.buffer().remaining(), + nativeSrc.memory.share(src.buffer().position()), + src.buffer().remaining(), + compressionLevel + ); } @Override @@ -56,7 +66,16 @@ public String getErrorName(long code) { } @Override - public long decompress(ByteBuffer dst, ByteBuffer src) { - return functions.ZSTD_decompress(dst, dst.remaining(), src, src.remaining()); + public long decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { + assert dst instanceof JnaCloseableByteBuffer; + assert src instanceof JnaCloseableByteBuffer; + var nativeDst = (JnaCloseableByteBuffer) dst; + var nativeSrc = (JnaCloseableByteBuffer) src; + return functions.ZSTD_decompress( + nativeDst.memory.share(dst.buffer().position()), + dst.buffer().remaining(), + nativeSrc.memory.share(src.buffer().position()), + src.buffer().remaining() + ); } } diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 23d2b6e2219d9..621eb2d625af4 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -57,6 +57,12 @@ def extractLibs = tasks.register('extractLibs', Copy) { filesMatching("win32*/*") { it.path = it.path.replace("win32", "windows") } + filePermissions { + unix("644") + } + dirPermissions { + unix("755") + } } artifacts { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java index aa5d94080afa9..6590aff307cc6 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java @@ -10,7 +10,16 @@ import java.nio.ByteBuffer; +/** + * A wrapper around a native {@link ByteBuffer} which allows that buffer to be + * closed synchronously. This is in contrast to JDK created native buffers + * which are deallocated only after GC has cleaned up references to + * the buffer. + */ public interface CloseableByteBuffer extends AutoCloseable { + /** + * Returns the wrapped {@link ByteBuffer}. + */ ByteBuffer buffer(); @Override diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java index 5b2be93dadc1f..f7019a4fd7a96 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java @@ -35,5 +35,11 @@ static NativeAccess instance() { */ Zstd getZstd(); + /** + * Creates a new {@link CloseableByteBuffer}. The buffer must be used within the same thread + * that it is created. 
+ * @param len the number of bytes the buffer should allocate + * @return the buffer + */ CloseableByteBuffer newBuffer(int len); } diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java index 6a0d348d5251b..60e65383bf9a2 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java @@ -25,13 +25,9 @@ public final class Zstd { * Compress the content of {@code src} into {@code dst} at compression level {@code level}, and return the number of compressed bytes. * {@link ByteBuffer#position()} and {@link ByteBuffer#limit()} of both {@link ByteBuffer}s are left unmodified. */ - public int compress(ByteBuffer dst, ByteBuffer src, int level) { + public int compress(CloseableByteBuffer dst, CloseableByteBuffer src, int level) { Objects.requireNonNull(dst, "Null destination buffer"); Objects.requireNonNull(src, "Null source buffer"); - assert dst.isDirect(); - assert dst.isReadOnly() == false; - assert src.isDirect(); - assert src.isReadOnly() == false; long ret = zstdLib.compress(dst, src, level); if (zstdLib.isError(ret)) { throw new IllegalArgumentException(zstdLib.getErrorName(ret)); @@ -45,13 +41,9 @@ public int compress(ByteBuffer dst, ByteBuffer src, int level) { * Compress the content of {@code src} into {@code dst}, and return the number of decompressed bytes. {@link ByteBuffer#position()} and * {@link ByteBuffer#limit()} of both {@link ByteBuffer}s are left unmodified. */ - public int decompress(ByteBuffer dst, ByteBuffer src) { + public int decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { Objects.requireNonNull(dst, "Null destination buffer"); Objects.requireNonNull(src, "Null source buffer"); - assert dst.isDirect(); - assert dst.isReadOnly() == false; - assert src.isDirect(); - assert src.isReadOnly() == false; long ret = zstdLib.decompress(dst, src); if (zstdLib.isError(ret)) { throw new IllegalArgumentException(zstdLib.getErrorName(ret)); diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java index feb1dbe8e3d61..ea4c8efa5318a 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java @@ -8,17 +8,17 @@ package org.elasticsearch.nativeaccess.lib; -import java.nio.ByteBuffer; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; public non-sealed interface ZstdLibrary extends NativeLibrary { long compressBound(int scrLen); - long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel); + long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel); boolean isError(long code); String getErrorName(long code); - long decompress(ByteBuffer dst, ByteBuffer src); + long decompress(CloseableByteBuffer dst, CloseableByteBuffer src); } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java index d802fd8be7a67..daa012d35598e 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java @@ -11,15 +11,18 @@ import 
org.elasticsearch.nativeaccess.CloseableByteBuffer; import java.lang.foreign.Arena; +import java.lang.foreign.MemorySegment; import java.nio.ByteBuffer; class JdkCloseableByteBuffer implements CloseableByteBuffer { private final Arena arena; + final MemorySegment segment; private final ByteBuffer bufferView; JdkCloseableByteBuffer(int len) { - this.arena = Arena.ofShared(); - this.bufferView = this.arena.allocate(len).asByteBuffer(); + this.arena = Arena.ofConfined(); + this.segment = arena.allocate(len); + this.bufferView = segment.asByteBuffer(); } @Override diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 745b93ac918dd..5313984ac6d61 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -58,7 +58,7 @@ static String findLibSystemd() { @Override public int sd_notify(int unset_environment, String state) { try (Arena arena = Arena.ofConfined()) { - MemorySegment nativeState = arena.allocateUtf8String(state); + MemorySegment nativeState = MemorySegmentUtil.allocateString(arena, state); return (int) sd_notify$mh.invokeExact(unset_environment, nativeState); } catch (Throwable t) { throw new AssertionError(t); diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java index d193750939b23..e3e972bc19d72 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java @@ -8,12 +8,12 @@ package org.elasticsearch.nativeaccess.jdk; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; import org.elasticsearch.nativeaccess.lib.ZstdLibrary; import java.lang.foreign.FunctionDescriptor; import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; -import java.nio.ByteBuffer; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_BOOLEAN; @@ -49,11 +49,17 @@ public long compressBound(int srcLen) { } @Override - public long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel) { - var nativeDst = MemorySegment.ofBuffer(dst); - var nativeSrc = MemorySegment.ofBuffer(src); + public long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel) { + assert dst instanceof JdkCloseableByteBuffer; + assert src instanceof JdkCloseableByteBuffer; + var nativeDst = (JdkCloseableByteBuffer) dst; + var nativeSrc = (JdkCloseableByteBuffer) src; + var dstSize = dst.buffer().remaining(); + var srcSize = src.buffer().remaining(); + var segmentDst = nativeDst.segment.asSlice(dst.buffer().position(), dstSize); + var segmentSrc = nativeSrc.segment.asSlice(src.buffer().position(), srcSize); try { - return (long) compress$mh.invokeExact(nativeDst, dst.remaining(), nativeSrc, src.remaining(), compressionLevel); + return (long) compress$mh.invokeExact(segmentDst, dstSize, segmentSrc, srcSize, compressionLevel); } catch (Throwable t) { throw new AssertionError(t); } @@ -79,11 +85,17 @@ public String getErrorName(long code) { } @Override - public long decompress(ByteBuffer dst, ByteBuffer src) { - var nativeDst = MemorySegment.ofBuffer(dst); - var nativeSrc = MemorySegment.ofBuffer(src); + public long 
decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { + assert dst instanceof JdkCloseableByteBuffer; + assert src instanceof JdkCloseableByteBuffer; + var nativeDst = (JdkCloseableByteBuffer) dst; + var nativeSrc = (JdkCloseableByteBuffer) src; + var dstSize = dst.buffer().remaining(); + var srcSize = src.buffer().remaining(); + var segmentDst = nativeDst.segment.asSlice(dst.buffer().position(), dstSize); + var segmentSrc = nativeSrc.segment.asSlice(src.buffer().position(), srcSize); try { - return (long) decompress$mh.invokeExact(nativeDst, dst.remaining(), nativeSrc, src.remaining()); + return (long) decompress$mh.invokeExact(segmentDst, dstSize, segmentSrc, srcSize); } catch (Throwable t) { throw new AssertionError(t); } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java index 53e4c06bf0435..6ac0243c3befe 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java @@ -8,6 +8,7 @@ package org.elasticsearch.nativeaccess.jdk; +import java.lang.foreign.Arena; import java.lang.foreign.MemorySegment; /** @@ -19,5 +20,9 @@ static String getString(MemorySegment segment, long offset) { return segment.getUtf8String(offset); } + static MemorySegment allocateString(Arena arena, String s) { + return arena.allocateUtf8String(s); + } + private MemorySegmentUtil() {} } diff --git a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java index c155647a3ccd4..59bb57d174009 100644 --- a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java +++ b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java @@ -8,6 +8,7 @@ package org.elasticsearch.nativeaccess.jdk; +import java.lang.foreign.Arena; import java.lang.foreign.MemorySegment; public class MemorySegmentUtil { @@ -16,5 +17,9 @@ static String getString(MemorySegment segment, long offset) { return segment.getString(offset); } + static MemorySegment allocateString(Arena arena, String s) { + return arena.allocateFrom(s); + } + private MemorySegmentUtil() {} } diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java index d051961b06c5f..1282b1fee9206 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java @@ -41,16 +41,16 @@ public void testCompressValidation() { var srcBuf = src.buffer(); var dstBuf = dst.buffer(); - var npe1 = expectThrows(NullPointerException.class, () -> zstd.compress(null, srcBuf, 0)); + var npe1 = expectThrows(NullPointerException.class, () -> zstd.compress(null, src, 0)); assertThat(npe1.getMessage(), equalTo("Null destination buffer")); - var npe2 = expectThrows(NullPointerException.class, () -> zstd.compress(dstBuf, null, 0)); + var npe2 = expectThrows(NullPointerException.class, () -> zstd.compress(dst, null, 0)); assertThat(npe2.getMessage(), equalTo("Null source buffer")); // dst capacity too low for (int i = 0; i < srcBuf.remaining(); ++i) { srcBuf.put(i, randomByte()); } - var e = expectThrows(IllegalArgumentException.class, () -> zstd.compress(dstBuf, srcBuf, 0)); + var e 
= expectThrows(IllegalArgumentException.class, () -> zstd.compress(dst, src, 0)); assertThat(e.getMessage(), equalTo("Destination buffer is too small")); } } @@ -64,21 +64,21 @@ public void testDecompressValidation() { var originalBuf = original.buffer(); var compressedBuf = compressed.buffer(); - var npe1 = expectThrows(NullPointerException.class, () -> zstd.decompress(null, originalBuf)); + var npe1 = expectThrows(NullPointerException.class, () -> zstd.decompress(null, original)); assertThat(npe1.getMessage(), equalTo("Null destination buffer")); - var npe2 = expectThrows(NullPointerException.class, () -> zstd.decompress(compressedBuf, null)); + var npe2 = expectThrows(NullPointerException.class, () -> zstd.decompress(compressed, null)); assertThat(npe2.getMessage(), equalTo("Null source buffer")); // Invalid compressed format for (int i = 0; i < originalBuf.remaining(); ++i) { originalBuf.put(i, (byte) i); } - var e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(compressedBuf, originalBuf)); + var e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(compressed, original)); assertThat(e.getMessage(), equalTo("Unknown frame descriptor")); - int compressedLength = zstd.compress(compressedBuf, originalBuf, 0); + int compressedLength = zstd.compress(compressed, original, 0); compressedBuf.limit(compressedLength); - e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(restored.buffer(), compressedBuf)); + e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(restored, compressed)); assertThat(e.getMessage(), equalTo("Destination buffer is too small")); } @@ -109,9 +109,9 @@ private void doTestRoundtrip(byte[] data) { var restored = nativeAccess.newBuffer(data.length) ) { original.buffer().put(0, data); - int compressedLength = zstd.compress(compressed.buffer(), original.buffer(), randomIntBetween(-3, 9)); + int compressedLength = zstd.compress(compressed, original, randomIntBetween(-3, 9)); compressed.buffer().limit(compressedLength); - int decompressedLength = zstd.decompress(restored.buffer(), compressed.buffer()); + int decompressedLength = zstd.decompress(restored, compressed); assertThat(restored.buffer(), equalTo(original.buffer())); assertThat(decompressedLength, equalTo(data.length)); } @@ -127,15 +127,15 @@ private void doTestRoundtrip(byte[] data) { original.buffer().put(decompressedOffset, data); original.buffer().position(decompressedOffset); compressed.buffer().position(compressedOffset); - int compressedLength = zstd.compress(compressed.buffer(), original.buffer(), randomIntBetween(-3, 9)); + int compressedLength = zstd.compress(compressed, original, randomIntBetween(-3, 9)); compressed.buffer().limit(compressedOffset + compressedLength); restored.buffer().position(decompressedOffset); - int decompressedLength = zstd.decompress(restored.buffer(), compressed.buffer()); + int decompressedLength = zstd.decompress(restored, compressed); + assertThat(decompressedLength, equalTo(data.length)); assertThat( restored.buffer().slice(decompressedOffset, data.length), equalTo(original.buffer().slice(decompressedOffset, data.length)) ); - assertThat(decompressedLength, equalTo(data.length)); } } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 6e70e9263df47..6bb16415adfc2 100644 --- 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -11,20 +11,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -180,28 +180,40 @@ public InternalBucket getBucketByKey(String key) { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final Map bucketsReducer = new HashMap<>(getBuckets().size()); + final ObjectObjectPagedHashMap> bucketsReducer = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { final InternalAdjacencyMatrix filters = (InternalAdjacencyMatrix) aggregation; for (InternalBucket bucket : filters.buckets) { - MultiBucketAggregatorsReducer reducer = bucketsReducer.computeIfAbsent( - bucket.key, - k -> new MultiBucketAggregatorsReducer(reduceContext, size) - ); + BucketReducer reducer = bucketsReducer.get(bucket.key); + if (reducer == null) { + reducer = new BucketReducer<>(bucket, reduceContext, size); + boolean success = false; + try { + bucketsReducer.put(bucket.key, reducer); + success = true; + } finally { + if (success == false) { + Releasables.close(reducer); + } + } + } reducer.accept(bucket); } } @Override public InternalAggregation get() { - List reducedBuckets = new ArrayList<>(bucketsReducer.size()); - for (Map.Entry entry : bucketsReducer.entrySet()) { - if (entry.getValue().getDocCount() >= 1) { - reducedBuckets.add(new InternalBucket(entry.getKey(), entry.getValue().getDocCount(), entry.getValue().get())); + List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); + bucketsReducer.forEach(entry -> { + if (entry.value.getDocCount() >= 1) { + reducedBuckets.add(new InternalBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations())); } - } + }); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); reducedBuckets.sort(Comparator.comparing(InternalBucket::getKey)); return new InternalAdjacencyMatrix(name, reducedBuckets, getMetadata()); @@ -209,7 +221,8 @@ public InternalAggregation get() { @Override public void close() { - Releasables.close(bucketsReducer.values()); + bucketsReducer.forEach(entry -> Releasables.close(entry.value)); + Releasables.close(bucketsReducer); } }; } diff --git 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index f0f7984079d97..ab531b69be947 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -21,7 +21,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -444,7 +444,7 @@ static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, Rou @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( + private final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( getBuckets().size(), reduceContext.bigArrays() ); @@ -460,9 +460,9 @@ public void accept(InternalAggregation aggregation) { min = Math.min(min, histogram.buckets.get(0).key); max = Math.max(max, histogram.buckets.get(histogram.buckets.size() - 1).key); for (Bucket bucket : histogram.buckets) { - MultiBucketAggregatorsReducer reducer = bucketsReducer.get(bucket.key); + BucketReducer reducer = bucketsReducer.get(bucket.key); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(reduceContext, size); + reducer = new BucketReducer<>(bucket, reduceContext, size); bucketsReducer.put(bucket.key, reducer); } reducer.accept(bucket); @@ -480,34 +480,34 @@ public InternalAggregation get() { { // fill the array and sort it final int[] index = new int[] { 0 }; - bucketsReducer.iterator().forEachRemaining(c -> keys[index[0]++] = c.key); + bucketsReducer.forEach(c -> keys[index[0]++] = c.key); Arrays.sort(keys); } final List reducedBuckets = new ArrayList<>(); if (keys.length > 0) { // list of buckets coming from different shards that have the same key - MultiBucketAggregatorsReducer currentReducer = null; + BucketReducer currentReducer = null; long key = reduceRounding.round(keys[0]); for (long top : keys) { if (reduceRounding.round(top) != key) { assert currentReducer != null; // the key changes, reduce what we already buffered and reset the buffer for current buckets - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.get())); + reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); currentReducer = null; key = reduceRounding.round(top); } - final MultiBucketAggregatorsReducer nextReducer = bucketsReducer.get(top); + final BucketReducer nextReducer = bucketsReducer.get(top); if (currentReducer == null) { currentReducer = nextReducer; } else { - currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.get())); + currentReducer.accept(createBucket(key, nextReducer.getDocCount(), 
nextReducer.getAggregations())); } } if (currentReducer != null) { - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.get())); + reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); } } @@ -546,7 +546,7 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(c -> Releasables.close(c.value)); + bucketsReducer.forEach(c -> Releasables.close(c.value)); Releasables.close(bucketsReducer); } }; diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml index 9e8ec6b3f6768..421c0c5800949 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml @@ -9,6 +9,7 @@ setup: index: tsdb body: settings: + number_of_shards: 1 mode: time_series routing_path: [key] time_series: diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java index 332622cc98db8..9f2e6feb91659 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java @@ -129,22 +129,17 @@ public void testRolloverOnAutoShardCondition() throws Exception { for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number // of 5 shards - shards.add(getShardStats(firstGenerationMeta, i, 75, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats( + firstGenerationMeta, + i, + (long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()), + assignedShardNodeId + ) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -180,21 +175,16 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), for (int i = 0; i < secondGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal number of // 7 shards - shards.add(getShardStats(secondGenerationMeta, i, 100, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + 
"[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats( + secondGenerationMeta, + i, + (long) Math.ceil(100.0 / secondGenerationMeta.getNumberOfShards()), + assignedShardNodeId + ) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationMeta, shards); RolloverResponse response = indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet(); assertAcked(response); @@ -232,21 +222,11 @@ instance.new NodeResponse(node.getId(), secondGenerationMeta.getNumberOfShards() for (int i = 0; i < thirdGenIndex.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal // number of 7 shards - shards.add(getShardStats(thirdGenIndex, i, 100, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), thirdGenIndex.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats(thirdGenIndex, i, (long) Math.ceil(100.0 / thirdGenIndex.getNumberOfShards()), assignedShardNodeId) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, thirdGenIndex, shards); RolloverRequest request = new RolloverRequest(dataStreamName, null); request.setConditions(RolloverConditions.newBuilder().addMaxIndexDocsCondition(1_000_000L).build()); @@ -309,22 +289,10 @@ public void testReduceShardsOnRollover() throws IOException { for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an optimal number // of 2 shards - shards.add(getShardStats(firstGenerationMeta, i, 2, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add(getShardStats(firstGenerationMeta, i, i < 2 ? 
1 : 0, assignedShardNodeId)); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -356,23 +324,11 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), .index(dataStreamBeforeRollover.getIndices().get(1)); List shards = new ArrayList<>(secondGenerationIndex.getNumberOfShards()); for (int i = 0; i < secondGenerationIndex.getNumberOfShards(); i++) { - // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an optimal - // number of 2 shards - shards.add(getShardStats(secondGenerationIndex, i, 2, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenerationIndex.getNumberOfShards(), shards, List.of()) - ); - }); + // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an + // optimal number of 2 shards + shards.add(getShardStats(secondGenerationIndex, i, i < 2 ? 1 : 0, assignedShardNodeId)); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationIndex, shards); RolloverRequest request = new RolloverRequest(dataStreamName, null); // adding condition that does NOT match @@ -438,6 +394,11 @@ public void testLazyRolloverKeepsPreviousAutoshardingDecision() throws IOExcepti IndexMetadata firstGenerationMeta = clusterStateBeforeRollover.getMetadata().index(firstGenerationIndex); List shards = new ArrayList<>(firstGenerationMeta.getNumberOfShards()); + String assignedShardNodeId = clusterStateBeforeRollover.routingTable() + .index(dataStreamBeforeRollover.getWriteIndex()) + .shard(0) + .primaryShard() + .currentNodeId(); for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number // of 5 shards @@ -445,29 +406,13 @@ public void testLazyRolloverKeepsPreviousAutoshardingDecision() throws IOExcepti getShardStats( firstGenerationMeta, i, - 75, - clusterStateBeforeRollover.routingTable() - .index(dataStreamBeforeRollover.getWriteIndex()) - .shard(0) - .primaryShard() - .currentNodeId() + (long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()), + assignedShardNodeId ) ); } - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); - } - + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, 
null)).actionGet()); ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -491,37 +436,22 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().dataStreams().get(dataStreamName); + String assignedShardNodeId = clusterStateBeforeRollover.routingTable() + .index(dataStreamBeforeRollover.getWriteIndex()) + .shard(0) + .primaryShard() + .currentNodeId(); IndexMetadata secondGenIndex = clusterStateBeforeRollover.metadata().index(dataStreamBeforeRollover.getIndices().get(1)); List shards = new ArrayList<>(secondGenIndex.getNumberOfShards()); for (int i = 0; i < secondGenIndex.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal // number of 7 shards shards.add( - getShardStats( - secondGenIndex, - i, - 100, - clusterStateBeforeRollover.routingTable() - .index(dataStreamBeforeRollover.getWriteIndex()) - .shard(i) - .primaryShard() - .currentNodeId() - ) + getShardStats(secondGenIndex, i, (long) Math.ceil(100.0 / secondGenIndex.getNumberOfShards()), assignedShardNodeId) ); } - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenIndex.getNumberOfShards(), shards, List.of()) - ); - }); - } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenIndex, shards); RolloverRequest request = new RolloverRequest(dataStreamName, null); request.lazy(true); @@ -612,4 +542,33 @@ public Settings additionalSettings() { } } + private static void mockStatsForIndex( + ClusterState clusterState, + String assignedShardNodeId, + IndexMetadata indexMetadata, + List shards + ) { + for (DiscoveryNode node : clusterState.nodes().getAllNodes()) { + // one node returns the stats for all our shards, the other nodes don't return any stats + if (node.getId().equals(assignedShardNodeId)) { + MockTransportService.getInstance(node.getName()) + .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { + TransportIndicesStatsAction instance = internalCluster().getInstance( + TransportIndicesStatsAction.class, + node.getName() + ); + channel.sendResponse(instance.new NodeResponse(node.getId(), indexMetadata.getNumberOfShards(), shards, List.of())); + }); + } else { + MockTransportService.getInstance(node.getName()) + .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { + TransportIndicesStatsAction instance = internalCluster().getInstance( + TransportIndicesStatsAction.class, + node.getName() + ); + channel.sendResponse(instance.new NodeResponse(node.getId(), 0, List.of(), List.of())); + }); + } + } + } } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 08b09bbc78348..772cc0f98d757 100644 --- 
a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -260,7 +260,7 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce assertThat( e.getCause().getCause().getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [metricset] was not a dimension." ) @@ -289,7 +289,7 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce } } - public void testInvalidTsdbTemplatesNoKeywordFieldType() throws Exception { + public void testTsdbTemplatesNoKeywordFieldType() throws Exception { var mappingTemplate = """ { "_doc":{ @@ -315,18 +315,7 @@ public void testInvalidTsdbTemplatesNoKeywordFieldType() throws Exception { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet() - ); - assertThat( - e.getCause().getCause().getMessage(), - equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [metricset] was [long]." - ) - ); + client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); } public void testInvalidTsdbTemplatesMissingSettings() throws Exception { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java index 5d84baa5f6ea4..aa3fa2a730be3 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java @@ -53,6 +53,17 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { public static final String MAPPING_TEMPLATE = """ { "_doc":{ + "dynamic_templates": [ + { + "strings_as_ip": { + "match_mapping_type": "string", + "match": "*ip", + "mapping": { + "type": "ip" + } + } + } + ], "properties": { "@timestamp" : { "type": "date" @@ -87,6 +98,8 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { "@timestamp": "$time", "attributes": { "metricset": "pod", + "number.long": $number1, + "number.double": $number2, "pod": { "name": "$name", "uid": "$uid", @@ -102,6 +115,15 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { } """; + private static String getRandomDoc(Instant time) { + return DOC.replace("$time", formatInstant(time)) + .replace("$uid", randomUUID()) + .replace("$name", randomAlphaOfLength(4)) + .replace("$number1", Long.toString(randomLong())) + .replace("$number2", Double.toString(randomDouble())) + .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))); + } + @Override protected Collection> getPlugins() { return List.of(DataStreamsPlugin.class, InternalSettingsPlugin.class); @@ -137,13 +159,7 @@ public void 
testIndexingGettingAndSearching() throws Exception { Instant time = Instant.now(); for (int i = 0; i < indexingIters; i++) { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); - indexRequest.source( - DOC.replace("$time", formatInstant(time)) - .replace("$uid", randomUUID()) - .replace("$name", randomAlphaOfLength(4)) - .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))), - XContentType.JSON - ); + indexRequest.source(getRandomDoc(time), XContentType.JSON); var indexResponse = client().index(indexRequest).actionGet(); index = indexResponse.getIndex(); String id = indexResponse.getId(); @@ -176,7 +192,9 @@ public void testIndexingGettingAndSearching() throws Exception { ); @SuppressWarnings("unchecked") var attributes = (Map>) ObjectPath.eval("properties.attributes.properties", mapping); - assertMap(attributes.get("pod.ip"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); + assertMap(attributes.get("number.long"), matchesMap().entry("type", "long").entry("time_series_dimension", true)); + assertMap(attributes.get("number.double"), matchesMap().entry("type", "float").entry("time_series_dimension", true)); + assertMap(attributes.get("pod.ip"), matchesMap().entry("type", "ip").entry("time_series_dimension", true)); assertMap(attributes.get("pod.uid"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); assertMap(attributes.get("pod.name"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); // alias field mappers: @@ -184,6 +202,14 @@ public void testIndexingGettingAndSearching() throws Exception { ObjectPath.eval("properties.metricset", mapping), matchesMap().entry("type", "alias").entry("path", "attributes.metricset") ); + assertMap( + ObjectPath.eval("properties.number.properties.long", mapping), + matchesMap().entry("type", "alias").entry("path", "attributes.number.long") + ); + assertMap( + ObjectPath.eval("properties.number.properties.double", mapping), + matchesMap().entry("type", "alias").entry("path", "attributes.number.double") + ); assertMap( ObjectPath.eval("properties.pod.properties", mapping), matchesMap().extraOk().entry("name", matchesMap().entry("type", "alias").entry("path", "attributes.pod.name")) @@ -220,13 +246,7 @@ public void testIndexingGettingAndSearchingShrunkIndex() throws Exception { var bulkRequest = new BulkRequest(dataStreamName); for (int i = 0; i < numBulkItems; i++) { var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); - indexRequest.source( - DOC.replace("$time", formatInstant(time)) - .replace("$uid", randomUUID()) - .replace("$name", randomAlphaOfLength(4)) - .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))), - XContentType.JSON - ); + indexRequest.source(getRandomDoc(time), XContentType.JSON); bulkRequest.add(indexRequest); time = time.plusMillis(1); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 06dc8919360f8..6e17964aa179a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -8,12 +8,14 @@ package org.elasticsearch.datastreams; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import 
org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService; import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import java.util.Map; import java.util.Set; /** @@ -21,6 +23,13 @@ */ public class DataStreamFeatures implements FeatureSpecification { + public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + + @Override + public Map getHistoricalFeatures() { + return Map.of(DATA_STREAM_LIFECYCLE, Version.V_8_11_0); + } + @Override public Set getFeatures() { return Set.of( diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java index 2aa5c07ad6be5..c7d70fa06162d 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PointValues; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.datastreams.DataStreamsStatsAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; @@ -44,7 +45,6 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; -import java.util.stream.Stream; public class DataStreamsStatsTransportAction extends TransportBroadcastByNodeAction< DataStreamsStatsAction.Request, @@ -104,25 +104,12 @@ protected ClusterBlockException checkRequestBlock( @Override protected String[] resolveConcreteIndexNames(ClusterState clusterState, DataStreamsStatsAction.Request request) { - List abstractionNames = indexNameExpressionResolver.dataStreamNames( + return DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, clusterState, - request.indicesOptions(), - request.indices() - ); - SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); - - String[] concreteDatastreamIndices = abstractionNames.stream().flatMap(abstractionName -> { - IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName); - assert indexAbstraction != null; - if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { - DataStream dataStream = (DataStream) indexAbstraction; - List indices = dataStream.getIndices(); - return indices.stream().map(Index::getName); - } else { - return Stream.empty(); - } - }).toArray(String[]::new); - return concreteDatastreamIndices; + request.indices(), + request.indicesOptions() + ).toArray(String[]::new); } @Override diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index 6e7528c470d49..c3e8331b856fd 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -44,7 +44,7 @@ import java.util.Set; import java.util.function.Consumer; 
-import static org.elasticsearch.datastreams.action.DataStreamsActionUtil.getDataStreamNames; +import static org.elasticsearch.action.datastreams.DataStreamsActionUtil.getDataStreamNames; public class DeleteDataStreamTransportAction extends AcknowledgedTransportMasterNodeAction { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index e44ee5107711f..41e62508cafbb 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.IndexProperties; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; @@ -20,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -199,7 +201,8 @@ static GetDataStreamAction.Response innerOperation( } return new GetDataStreamAction.Response( dataStreamInfos, - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? 
clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java index 676052f76d564..5bfdf2d382005 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -16,6 +17,8 @@ import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -136,23 +139,33 @@ public Request indicesOptions(IndicesOptions indicesOptions) { } /** - * Class representing the response for the explain of the data stream lifecycle action for one or more indices. + * Class representing the response for the 'explain' of the data stream lifecycle action for one or more indices. */ public static class Response extends ActionResponse implements ChunkedToXContentObject { public static final ParseField INDICES_FIELD = new ParseField("indices"); - private List indices; + private final List indices; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; - public Response(List indices, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List indices, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.indices = indices; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { super(in); this.indices = in.readCollectionAsList(ExplainIndexDataStreamLifecycle::new); this.rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); + this.globalRetention = in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null; } public List getIndices() { @@ -163,10 +176,17 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(indices); out.writeOptionalWriteable(rolloverConfiguration); + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -178,12 +198,14 @@ public boolean equals(Object o) { return false; } Response response = (Response) o; - return Objects.equals(indices, response.indices) && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + return Objects.equals(indices, response.indices) + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, response.globalRetention); } @Override public int hashCode() { - return Objects.hash(indices, rolloverConfiguration); + return Objects.hash(indices, rolloverConfiguration, globalRetention); } @Override @@ -194,7 +216,11 @@ public Iterator toXContentChunked(ToXContent.Params outerP return builder; }), Iterators.map(indices.iterator(), explainIndexDataLifecycle -> (builder, params) -> { builder.field(explainIndexDataLifecycle.getIndex()); - explainIndexDataLifecycle.toXContent(builder, params, rolloverConfiguration); + ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + explainIndexDataLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); return builder; }), Iterators.single((builder, params) -> { builder.endObject(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java index 8149e1a0df443..79e1b71771559 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.datastreams.lifecycle.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -14,6 +15,7 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -157,19 +159,24 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** - * Converts the response to XContent and passes the RolloverConditions, when provided, to the data stream lifecycle. 
+ * Converts the response to XContent and passes the RolloverConditions and the global retention, when provided, + * to the data stream lifecycle. */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(NAME_FIELD.getPreferredName(), dataStreamName); if (lifecycle != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + lifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); } builder.endObject(); return builder; @@ -179,18 +186,31 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla private final List dataStreamLifecycles; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(List dataStreamLifecycles) { - this(dataStreamLifecycles, null); + this(dataStreamLifecycles, null, null); } - public Response(List dataStreamLifecycles, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List dataStreamLifecycles, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.dataStreamLifecycles = dataStreamLifecycles; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { - this(in.readCollectionAsList(Response.DataStreamLifecycle::new), in.readOptionalWriteable(RolloverConfiguration::new)); + this( + in.readCollectionAsList(Response.DataStreamLifecycle::new), + in.readOptionalWriteable(RolloverConfiguration::new), + in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null + ); } public List getDataStreamLifecycles() { @@ -206,6 +226,9 @@ public RolloverConfiguration getRolloverConfiguration() { public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreamLifecycles); out.writeOptionalWriteable(rolloverConfiguration); + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -214,17 +237,17 @@ public Iterator toXContentChunked(ToXContent.Params outerParams) { builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); return builder; - }), - Iterators.map( - dataStreamLifecycles.iterator(), - dataStreamLifecycle -> (builder, params) -> dataStreamLifecycle.toXContent(builder, params, rolloverConfiguration) - ), - Iterators.single((builder, params) -> { - builder.endArray(); - builder.endObject(); - return builder; - }) - ); + }), Iterators.map(dataStreamLifecycles.iterator(), dataStreamLifecycle -> (builder, params) -> { + ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( + org.elasticsearch.cluster.metadata.DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + return dataStreamLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + }), Iterators.single((builder, params) -> { + builder.endArray(); + builder.endObject(); + return builder; + })); } @Override @@ -233,12 +256,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Response response = (Response) o; return dataStreamLifecycles.equals(response.dataStreamLifecycles) - && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, response.globalRetention); } @Override public int hashCode() { - return Objects.hash(dataStreamLifecycles, rolloverConfiguration); + return Objects.hash(dataStreamLifecycles, rolloverConfiguration, globalRetention); } } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java index 0381014aed24b..9683588bdcae3 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -19,7 +20,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java index a42e8dfefc468..a5c3b092a8913 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -111,7 +112,8 @@ protected void masterOperation( listener.onResponse( new ExplainDataStreamLifecycleAction.Response( explainIndices, - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ) ); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java index 29b88fc5748bf..3a3a54d747920 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java @@ -8,19 +8,20 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -89,7 +90,8 @@ protected void masterOperation( ) .sorted(Comparator.comparing(GetDataStreamLifecycleAction.Response.DataStreamLifecycle::dataStreamName)) .toList(), - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? 
clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ) ); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java index 31d7237eeb681..7a33d3011c621 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -19,7 +20,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java index 637fb44affb6f..2a356e3ebb166 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java @@ -11,11 +11,13 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -35,6 +37,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class GetDataStreamsTransportActionTests extends ESTestCase { @@ -248,4 +251,45 @@ public void testGetTimeSeriesMixedDataStream() { ) ); } + + public void testPassingGlobalRetention() { + ClusterState state; + { + var mBuilder = new Metadata.Builder(); + DataStreamTestHelper.getClusterStateWithDataStreams( + mBuilder, + List.of(Tuple.tuple("data-stream-1", 2)), + List.of(), + System.currentTimeMillis(), + Settings.EMPTY, + 0, + false, + false + ); + state = ClusterState.builder(new ClusterName("_name")).metadata(mBuilder).build(); + } + + var req = new GetDataStreamAction.Request(new String[] {}); + var 
response = GetDataStreamsTransportAction.innerOperation( + state, + req, + resolver, + systemIndices, + ClusterSettings.createBuiltInClusterSettings() + ); + assertThat(response.getGlobalRetention(), nullValue()); + DataStreamGlobalRetention globalRetention = new DataStreamGlobalRetention( + TimeValue.timeValueDays(randomIntBetween(1, 5)), + TimeValue.timeValueDays(randomIntBetween(5, 10)) + ); + state = ClusterState.builder(state).putCustom(DataStreamGlobalRetention.TYPE, globalRetention).build(); + response = GetDataStreamsTransportAction.innerOperation( + state, + req, + resolver, + systemIndices, + ClusterSettings.createBuiltInClusterSettings() + ); + assertThat(response.getGlobalRetention(), equalTo(globalRetention)); + } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java index 829fe454f7463..462c0626c6296 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java @@ -14,7 +14,9 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -35,6 +37,7 @@ import static org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction.Response; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -66,7 +69,7 @@ public void testToXContent() throws IOException { ExplainIndexDataStreamLifecycle explainIndex = createRandomIndexDataStreamLifecycleExplanation(now, lifecycle); explainIndex.setNowSupplier(() -> now); { - Response response = new Response(List.of(explainIndex), null); + Response response = new Response(List.of(explainIndex), null, null); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -103,7 +106,7 @@ public void testToXContent() throws IOException { } else { assertThat(explainIndexMap.get("generation_time"), is(nullValue())); } - assertThat(explainIndexMap.get("lifecycle"), is(Map.of("enabled", true))); // empty lifecycle + assertThat(explainIndexMap.get("lifecycle"), is(Map.of("enabled", true))); if (explainIndex.getError() != null) { Map errorObject = (Map) explainIndexMap.get("error"); assertThat(errorObject.get(ErrorEntry.MESSAGE_FIELD.getPreferredName()), is(explainIndex.getError().error())); @@ -132,7 +135,11 @@ public void testToXContent() throws IOException { new MinPrimaryShardDocsCondition(4L) ) ); - Response response = new Response(List.of(explainIndex), new RolloverConfiguration(rolloverConditions)); + Response response = new Response( + 
List.of(explainIndex), + new RolloverConfiguration(rolloverConditions), + DataStreamTestHelper.randomGlobalRetention() + ); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -186,9 +193,27 @@ public void testToXContent() throws IOException { assertThat(explainIndexMap.get("error"), is(nullValue())); } - Map lifecycleRollover = (Map) ((Map) explainIndexMap.get("lifecycle")).get( - "rollover" - ); + Map lifecycleMap = (Map) explainIndexMap.get("lifecycle"); + assertThat(lifecycleMap.get("data_retention"), nullValue()); + + if (response.getGlobalRetention() == null) { + assertThat(lifecycleMap.get("effective_retention"), nullValue()); + assertThat(lifecycleMap.get("retention_determined_by"), nullValue()); + } else if (response.getGlobalRetention().getDefaultRetention() != null) { + assertThat( + lifecycleMap.get("effective_retention"), + equalTo(response.getGlobalRetention().getDefaultRetention().getStringRep()) + ); + assertThat(lifecycleMap.get("retention_determined_by"), equalTo("default_global_retention")); + } else { + assertThat( + lifecycleMap.get("effective_retention"), + equalTo(response.getGlobalRetention().getMaxRetention().getStringRep()) + ); + assertThat(lifecycleMap.get("retention_determined_by"), equalTo("max_global_retention")); + } + + Map lifecycleRollover = (Map) lifecycleMap.get("rollover"); assertThat(lifecycleRollover.get("min_primary_shard_docs"), is(4)); assertThat(lifecycleRollover.get("max_primary_shard_docs"), is(9)); } @@ -212,7 +237,7 @@ public void testToXContent() throws IOException { ) : null ); - Response response = new Response(List.of(explainIndexWithNullGenerationDate), null); + Response response = new Response(List.of(explainIndexWithNullGenerationDate), null, null); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -241,6 +266,7 @@ public void testChunkCount() { createRandomIndexDataStreamLifecycleExplanation(now, lifecycle), createRandomIndexDataStreamLifecycleExplanation(now, lifecycle) ), + null, null ); @@ -296,6 +322,12 @@ private Response randomResponse() { Map.of(MaxPrimaryShardDocsCondition.NAME, new MaxPrimaryShardDocsCondition(randomLongBetween(1000, 199_999_000))) ) ) + : null, + randomBoolean() + ? 
new DataStreamGlobalRetention( + TimeValue.timeValueDays(randomIntBetween(1, 10)), + TimeValue.timeValueDays(randomIntBetween(10, 20)) + ) : null ); } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 20eb33ecefdee..683cf675cda8e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -231,13 +231,13 @@ dynamic templates: refresh: true body: - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": "10", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": "10", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:09.138Z", "data": "20", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z", "data": "20", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.138Z", "data": "30", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z", "data": "30", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.238Z", "data": "40", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z", "data": "40", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20.5" }' - do: search: @@ -263,7 +263,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -282,7 +282,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -301,7 +301,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: 
"MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -313,14 +313,14 @@ dynamic templates: filterA: filter: term: - another.dim2: 10 + another.dim2: 10.5 aggs: tsids: terms: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- @@ -466,13 +466,13 @@ dynamic templates with nesting: refresh: true body: - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5", "attributes.a.much.deeper.nested.dim": "AC" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5", "attributes.a.much.deeper.nested.dim": "AC" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5", "attributes.a.much.deeper.nested.dim": "BD" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5", "attributes.a.much.deeper.nested.dim": "BD" }' - do: search: @@ -498,7 +498,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -517,7 +517,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: 
"NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -536,7 +536,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -548,14 +548,227 @@ dynamic templates with nesting: filterA: filter: term: - another.dim2: 10 + another.dim2: 10.5 + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + a.much.deeper.nested.dim: AC + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + +--- +dynamic templates with incremental indexing: + - skip: + version: " - 8.12.99" + reason: "Support for dynamic fields was added in 8.13" + - do: + allowed_warnings: + - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" + indices.put_index_template: + name: my-dynamic-template + body: + index_patterns: [k9s*] + data_stream: {} + template: + settings: + index: + number_of_shards: 1 + mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z + + mappings: + properties: + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + resource: + type: object + properties: + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + dynamic_templates: + - counter_metric: + mapping: + type: integer + time_series_metric: counter + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "attributes.dim2": "C" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "attributes.dim2": "C" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "attributes.dim2": "D" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "attributes.dim2": "D" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:08.138Z","data": "110", "resource.attributes.another.dim1": "1", "attributes.another.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": 
"counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:09.138Z","data": "120", "resource.attributes.another.dim1": "1", "attributes.another.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:10.138Z","data": "130", "resource.attributes.another.dim1": "2", "attributes.another.dim2": "20.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:10.238Z","data": "140", "resource.attributes.another.dim1": "2", "attributes.another.dim2": "20.5" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:08.138Z","data": "210", "resource.attributes.another.deeper.dim1": "1", "attributes.another.deeper.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:09.138Z","data": "220", "resource.attributes.another.deeper.dim1": "1", "attributes.another.deeper.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:10.138Z","data": "230", "resource.attributes.another.deeper.dim1": "2", "attributes.another.deeper.dim2": "20.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:10.238Z","data": "240", "resource.attributes.another.deeper.dim1": "2", "attributes.another.deeper.dim2": "20.5" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:08.138Z","data": "310", "attributes.a.much.deeper.nested.dim": "AC" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:09.138Z","data": "320", "attributes.a.much.deeper.nested.dim": "AC" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:10.138Z","data": "330", "attributes.a.much.deeper.nested.dim": "BD" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:10.238Z","data": "340", "attributes.a.much.deeper.nested.dim": "BD" }' + + - do: + search: + index: k9s + body: + size: 0 + + - match: { hits.total.value: 16 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + dim1: A + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + dim2: C + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + another.deeper.dim1: 1 + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + another.deeper.dim2: 10.5 + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + 
body: + size: 0 + aggs: + filterA: + filter: + term: + a.much.deeper.nested.dim: AC aggs: tsids: terms: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index b9621977ff3aa..f22267357104e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -108,3 +108,83 @@ teardown: indices.delete: index: .fs-logs-foobar-* - is_true: acknowledged + +--- +"Redirect shard failure in data stream to failure store": + - skip: + version: " - 8.13.99" + reason: "data stream failure stores only redirect shard failures in 8.14+" + features: [allowed_warnings, contains] + + - do: + allowed_warnings: + - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" + indices.put_index_template: + name: generic_logs_template + body: + index_patterns: logs-* + data_stream: + failure_store: true + template: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + properties: + '@timestamp': + type: date + count: + type: long + + + - do: + index: + index: logs-foobar + refresh: true + body: + '@timestamp': '2020-12-12' + count: 'invalid value' + + - do: + indices.get_data_stream: + name: logs-foobar + - match: { data_streams.0.name: logs-foobar } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store: true } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + search: + index: logs-foobar + body: { query: { match_all: {} } } + - length: { hits.hits: 0 } + + - do: + search: + index: .fs-logs-foobar-* + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: "/\\.fs-logs-foobar-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - exists: hits.hits.0._source.@timestamp + - not_exists: hits.hits.0._source.count + - match: { hits.hits.0._source.document.index: 'logs-foobar' } + - match: { hits.hits.0._source.document.source.@timestamp: '2020-12-12' } + - match: { hits.hits.0._source.document.source.count: 'invalid value' } + - match: { hits.hits.0._source.error.type: 'document_parsing_exception' } + - contains: { hits.hits.0._source.error.message: "failed to parse field [count] of type [long] in document with id " } + - contains: { hits.hits.0._source.error.message: "Preview of field's value: 'invalid value'" } + - contains: { hits.hits.0._source.error.stack_trace: "org.elasticsearch.index.mapper.DocumentParsingException: " } + - contains: { hits.hits.0._source.error.stack_trace: "failed to parse field [count] of type [long] in document with id" } + - contains: { 
hits.hits.0._source.error.stack_trace: "Preview of field's value: 'invalid value'" } + + - do: + indices.delete_data_stream: + name: logs-foobar + - is_true: acknowledged + + - do: + indices.delete: + index: .fs-logs-foobar-* + - is_true: acknowledged diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java index acfda99ae42fc..64b704a484058 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java @@ -13,10 +13,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ingest.geoip.GeoIpDownloader; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -25,26 +23,12 @@ public class GeoIpDownloaderStats implements Task.Status { public static final GeoIpDownloaderStats EMPTY = new GeoIpDownloaderStats(0, 0, 0, 0, 0, 0); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "geoip_downloader_stats", - a -> new GeoIpDownloaderStats((int) a[0], (int) a[1], (long) a[2], (int) a[3], (int) a[4], a[5] == null ? 0 : (int) a[5]) - ); - - private static final ParseField SUCCESSFUL_DOWNLOADS = new ParseField("successful_downloads"); - private static final ParseField FAILED_DOWNLOADS = new ParseField("failed_downloads"); - private static final ParseField TOTAL_DOWNLOAD_TIME = new ParseField("total_download_time"); - private static final ParseField DATABASES_COUNT = new ParseField("databases_count"); - private static final ParseField SKIPPED_DOWNLOADS = new ParseField("skipped_updates"); - private static final ParseField EXPIRED_DATABASES = new ParseField("expired_databases"); - - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), SUCCESSFUL_DOWNLOADS); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), FAILED_DOWNLOADS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_DOWNLOAD_TIME); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), DATABASES_COUNT); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), SKIPPED_DOWNLOADS); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), EXPIRED_DATABASES); - } + static final ParseField SUCCESSFUL_DOWNLOADS = new ParseField("successful_downloads"); + static final ParseField FAILED_DOWNLOADS = new ParseField("failed_downloads"); + static final ParseField TOTAL_DOWNLOAD_TIME = new ParseField("total_download_time"); + static final ParseField DATABASES_COUNT = new ParseField("databases_count"); + static final ParseField SKIPPED_DOWNLOADS = new ParseField("skipped_updates"); + static final ParseField EXPIRED_DATABASES = new ParseField("expired_databases"); private final int successfulDownloads; private final int failedDownloads; @@ -62,7 +46,7 @@ public GeoIpDownloaderStats(StreamInput in) throws IOException { expiredDatabases = in.readVInt(); } - private GeoIpDownloaderStats( + GeoIpDownloaderStats( int successfulDownloads, int failedDownloads, long totalDownloadTime, @@ -170,10 +154,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) 
throws return builder; } - public static GeoIpDownloaderStats fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(successfulDownloads); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java index 68b1ac4b28ff7..69e9cc9b5f5e5 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java @@ -10,15 +10,30 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; public class GeoIpDownloaderStatsSerializingTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "geoip_downloader_stats", + a -> new GeoIpDownloaderStats((int) a[0], (int) a[1], (long) a[2], (int) a[3], (int) a[4], a[5] == null ? 0 : (int) a[5]) + ); + + static { + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.SUCCESSFUL_DOWNLOADS); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.FAILED_DOWNLOADS); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.TOTAL_DOWNLOAD_TIME); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.DATABASES_COUNT); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.SKIPPED_DOWNLOADS); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), GeoIpDownloaderStats.EXPIRED_DATABASES); + } + @Override protected GeoIpDownloaderStats doParseInstance(XContentParser parser) throws IOException { - return GeoIpDownloaderStats.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java index fc56b0066faa9..415d270ef4fdb 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -34,7 +35,11 @@ public class RatedSearchHitTests extends ESTestCase { ); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> SearchHit.fromXContent(p), new ParseField("hit")); + PARSER.declareObject( + ConstructingObjectParser.constructorArg(), + (p, c) -> SearchResponseUtils.parseSearchHit(p), + new ParseField("hit") + ); PARSER.declareField( ConstructingObjectParser.constructorArg(), (p) -> p.currentToken() == 
XContentParser.Token.VALUE_NULL ? OptionalInt.empty() : OptionalInt.of(p.intValue()),
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
index 4080a47c7dabe..94cfce5357857 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
@@ -344,6 +344,27 @@ public void testRequestStatsWithOperationPurposes() throws IOException {
         assertThat(newStats.keySet(), equalTo(allOperations));
         assertThat(newStats, not(equalTo(initialStats)));
+        // Exercise the stats report that keeps the fine-grained information
+        final Map fineStats = statsCollectors.statsMap(true);
+        assertThat(
+            fineStats.keySet(),
+            equalTo(
+                statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::toString).collect(Collectors.toUnmodifiableSet())
+            )
+        );
+        // fine-grained stats aggregate to the coarse-grained stats (without entries with value 0)
+        assertThat(
+            fineStats.entrySet()
+                .stream()
+                .collect(Collectors.groupingBy(entry -> entry.getKey().split("_", 2)[1], Collectors.summingLong(Map.Entry::getValue))),
+            equalTo(
+                newStats.entrySet()
+                    .stream()
+                    .filter(entry -> entry.getValue() != 0L)
+                    .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue))
+            )
+        );
+
         final Set operationsSeenForTheNewPurpose = statsCollectors.collectors.keySet()
             .stream()
             .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT_METADATA)
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java
index 1b4ab7de0c2ff..085e357da5ae9 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java
@@ -7,7 +7,6 @@
  */
 package org.elasticsearch.repositories.s3;
-import com.amazonaws.AmazonClientException;
 import com.amazonaws.services.s3.model.AmazonS3Exception;
 import com.amazonaws.services.s3.model.GetObjectRequest;
 import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
@@ -26,6 +25,7 @@
 import org.elasticsearch.common.settings.SecureSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
 import org.elasticsearch.core.Booleans;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.indices.recovery.RecoverySettings;
@@ -44,12 +44,14 @@
 import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.blankOrNullString;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static
org.hamcrest.Matchers.hasSize; @@ -238,9 +240,26 @@ public void testReadFromPositionLargerThanBlobLength() { long position = randomLongBetween(blobBytes.length(), Long.MAX_VALUE - 1L); long length = randomLongBetween(1L, Long.MAX_VALUE - position); - var exception = expectThrows(AmazonClientException.class, () -> readBlob(repository, blobName, position, length)); - assertThat(exception, instanceOf(AmazonS3Exception.class)); - assertThat(((AmazonS3Exception) exception).getStatusCode(), equalTo(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus())); + var exception = expectThrows(UncategorizedExecutionException.class, () -> readBlob(repository, blobName, position, length)); + assertThat(exception.getCause(), instanceOf(ExecutionException.class)); + assertThat(exception.getCause().getCause(), instanceOf(IOException.class)); + assertThat( + exception.getCause().getCause().getMessage(), + containsString( + "Requested range [start=" + + position + + ", end=" + + (position + length - 1L) + + ", currentOffset=0] cannot be satisfied for blob object [" + + repository.basePath().buildAsString() + + blobName + + ']' + ) + ); + assertThat( + asInstanceOf(AmazonS3Exception.class, exception.getRootCause()).getStatusCode(), + equalTo(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) + ); } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 6b9937b01a433..895f5273dbba0 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -376,7 +376,7 @@ public void close() throws IOException { @Override public Map stats() { - return statsCollectors.statsMap(); + return statsCollectors.statsMap(service.isStateless); } // Package private for testing @@ -461,7 +461,12 @@ static Operation parse(String s) { } } - record StatsKey(Operation operation, OperationPurpose purpose) {} + record StatsKey(Operation operation, OperationPurpose purpose) { + @Override + public String toString() { + return purpose.getKey() + "_" + operation.getKey(); + } + } class StatsCollectors { final Map collectors = new ConcurrentHashMap<>(); @@ -470,10 +475,16 @@ RequestMetricCollector getMetricCollector(Operation operation, OperationPurpose return collectors.computeIfAbsent(new StatsKey(operation, purpose), k -> buildMetricCollector(k.operation(), k.purpose())); } - Map statsMap() { - final Map m = Arrays.stream(Operation.values()).collect(Collectors.toMap(Operation::getKey, e -> 0L)); - collectors.forEach((sk, v) -> m.compute(sk.operation().getKey(), (k, c) -> Objects.requireNonNull(c) + v.counter.sum())); - return Map.copyOf(m); + Map statsMap(boolean isStateless) { + if (isStateless) { + return collectors.entrySet() + .stream() + .collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> entry.getValue().counter.sum())); + } else { + final Map m = Arrays.stream(Operation.values()).collect(Collectors.toMap(Operation::getKey, e -> 0L)); + collectors.forEach((sk, v) -> m.compute(sk.operation().getKey(), (k, c) -> Objects.requireNonNull(c) + v.counter.sum())); + return Map.copyOf(m); + } } IgnoreNoResponseMetricsCollector buildMetricCollector(Operation operation, OperationPurpose purpose) { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java 
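A note on the statsMap change above: stateful nodes keep the old per-operation map, and only when DiscoveryNode.isStateless(nodeSettings) is true does the report switch to one entry per StatsKey, keyed as "<purpose>_<operation>" via the new toString(). The coarse per-operation view can then be recovered by grouping on the part after the first underscore, which is what the integration test earlier in this change asserts. Below is a self-contained sketch of that roll-up in plain Java; the keys and counts are made-up examples, not real repository stats, and the split assumes the purpose key itself contains no underscore:

import java.util.Map;
import java.util.stream.Collectors;

public class FineToCoarseS3StatsSketch {
    public static void main(String[] args) {
        // Hypothetical fine-grained stats as reported on a stateless node:
        // each key follows the StatsKey#toString() shape "<purpose>_<operation>".
        Map<String, Long> fineStats = Map.of(
            "SnapshotData_GetObject", 7L,
            "SnapshotMetadata_GetObject", 3L,
            "SnapshotData_PutObject", 5L
        );

        // Group by the operation part (after the first underscore) and sum the
        // counters, mirroring the aggregation the test performs against the
        // coarse-grained per-operation map.
        Map<String, Long> coarseStats = fineStats.entrySet()
            .stream()
            .collect(Collectors.groupingBy(e -> e.getKey().split("_", 2)[1], Collectors.summingLong(Map.Entry::getValue)));

        System.out.println(coarseStats); // e.g. {PutObject=5, GetObject=10}
    }
}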
b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 998455a658406..d08ff5eefd20f 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; import org.elasticsearch.repositories.s3.S3BlobStore.Operation; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.io.InputStream; @@ -94,16 +95,34 @@ private void openStreamWithRetry() throws IOException { : "requesting beyond end, start = " + start + " offset=" + currentOffset + " end=" + end; getObjectRequest.setRange(Math.addExact(start, currentOffset), end); } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); this.currentStreamFirstOffset = Math.addExact(start, currentOffset); + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); this.currentStreamLastOffset = Math.addExact(currentStreamFirstOffset, getStreamLength(s3Object)); this.currentStream = s3Object.getObjectContent(); return; } catch (AmazonClientException e) { - if (e instanceof AmazonS3Exception amazonS3Exception && 404 == amazonS3Exception.getStatusCode()) { - throw addSuppressedExceptions( - new NoSuchFileException("Blob object [" + blobKey + "] not found: " + amazonS3Exception.getMessage()) - ); + if (e instanceof AmazonS3Exception amazonS3Exception) { + if (amazonS3Exception.getStatusCode() == RestStatus.NOT_FOUND.getStatus()) { + throw addSuppressedExceptions( + new NoSuchFileException("Blob object [" + blobKey + "] not found: " + amazonS3Exception.getMessage()) + ); + } + if (amazonS3Exception.getStatusCode() == RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) { + throw addSuppressedExceptions( + new IOException( + "Requested range [start=" + + start + + ", end=" + + end + + ", currentOffset=" + + currentOffset + + "] cannot be satisfied for blob object [" + + blobKey + + ']', + amazonS3Exception + ) + ); + } } if (attempt == 1) { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index fc58482651fa3..c8a7cc12a90f4 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService; import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -92,6 +93,7 @@ class S3Service implements Closeable { final TimeValue compareAndExchangeTimeToLive; final TimeValue compareAndExchangeAntiContentionDelay; + final boolean isStateless; S3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { webIdentityTokenCredentialsProvider = new CustomWebIdentityTokenCredentialsProvider( @@ -103,6 +105,7 @@ class S3Service implements Closeable { ); compareAndExchangeTimeToLive = 
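The S3RetryingInputStream hunk above stops letting a raw AmazonS3Exception with HTTP 416 escape and instead raises an IOException that names the requested range and the blob key, keeping the SDK exception as the cause. A stripped-down sketch of that translation in plain Java, with a minimal stand-in for the SDK exception rather than the real AWS types:

import java.io.IOException;
import java.nio.file.NoSuchFileException;

public class RangeNotSatisfiedSketch {

    // Minimal stand-in for the AWS SDK exception; only the HTTP status code matters here.
    static class StatusCodeException extends RuntimeException {
        final int statusCode;

        StatusCodeException(int statusCode, String message) {
            super(message);
            this.statusCode = statusCode;
        }
    }

    // Mirrors the idea of the hunk above: 404 turns into a "no such file" error,
    // 416 turns into an IOException describing the requested range and blob key,
    // and anything else is rethrown so the caller's retry logic can handle it.
    static void translate(StatusCodeException e, String blobKey, long start, long end, long currentOffset) throws IOException {
        if (e.statusCode == 404) {
            throw new NoSuchFileException("Blob object [" + blobKey + "] not found: " + e.getMessage());
        }
        if (e.statusCode == 416) {
            throw new IOException(
                "Requested range [start=" + start + ", end=" + end + ", currentOffset=" + currentOffset
                    + "] cannot be satisfied for blob object [" + blobKey + ']',
                e
            );
        }
        throw e;
    }

    public static void main(String[] args) {
        try {
            translate(new StatusCodeException(416, "range not satisfiable"), "indices/0/snap.dat", 100, 199, 0);
        } catch (IOException expected) {
            System.out.println(expected.getMessage());
        }
    }
}

In the reworked third-party test earlier in this change, that IOException surfaces wrapped as UncategorizedExecutionException, then ExecutionException, with the original 416 exception still reachable as the root cause.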
REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); + isStateless = DiscoveryNode.isStateless(nodeSettings); } /** diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 05268d750637c..52cc48ddac589 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -708,8 +708,8 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_retries_forever"), new FlakyReadHandler()); // Ranged read - final int position = between(0, bytes.length - 1); - final int length = between(0, randomBoolean() ? bytes.length : Integer.MAX_VALUE); + final int position = between(0, bytes.length - 2); + final int length = between(1, randomBoolean() ? bytes.length : Integer.MAX_VALUE); logger.info("--> position={}, length={}", position, length); try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.INDICES, "read_blob_retries_forever", position, length)) { assertMetricsForOpeningStream(); diff --git a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml index bd5c45823aaae..e8bf1f84e6437 100644 --- a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml +++ b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml @@ -108,6 +108,76 @@ setup: - match: {hits.hits.0.fields.day_of_week_letters: [T, a, d, h, r, s, u, y] } - match: {hits.hits.0.fields.prefixed_node: [node_c] } +--- +"fetch multiple stored fields": + - skip: + version: " - 8.13.99" + reason: "bug fixed in 8.14" + + - do: + indices.create: + index: sensor-test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + runtime: + prefixed_node: + type: keyword + script: + source: | + for (String node : params._fields.node.values) { + emit(params.prefix + node); + } + params: + prefix: node_ + prefixed_region: + type: keyword + script: + source: | + for (String region : params._fields.region.values) { + emit(params.prefix + region) + } + params: + prefix: us- + properties: + timestamp: + type: date + node: + type: keyword + store: true + region: + type: keyword + store: true + + - do: + bulk: + index: sensor-test + refresh: true + body: | + {"index":{}} + {"timestamp": 1516729294000, "node": "a", "region": "west-1" } + {"index":{}} + {"timestamp": 1516642894000, "node": "b", "region": "west-2" } + {"index":{}} + {"timestamp": 1516556494000, "node": "a", "region": "west-1"} + {"index":{}} + {"timestamp": 1516470094000, "node": "b", "region": "west-2"} + {"index":{}} + {"timestamp": 1516383694000, "node": "c", "region": "west-2"} + {"index":{}} + {"timestamp": 1516297294000, "node": "c", "region": "west-2"} + - do: + search: + index: sensor-test + body: + sort: timestamp + fields: [prefixed_node, prefixed_region] + - match: {hits.total.value: 6} + - match: {hits.hits.0.fields.prefixed_node: [node_c] } + - match: {hits.hits.0.fields.prefixed_region: 
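A small aside on the S3BlobContainerRetriesTests tweak above: position now tops out at bytes.length - 2 and length starts at 1, so the randomly chosen ranged read is never empty and never begins on or past the last byte of the blob. A hedged sketch of that invariant, with a local stand-in for the randomized-testing between() helper:

import java.util.concurrent.ThreadLocalRandom;

public class RangedReadBoundsSketch {
    // Stand-in for the randomized-testing between(min, max) helper (inclusive bounds).
    static int between(int min, int max) {
        return min + ThreadLocalRandom.current().nextInt(max - min + 1);
    }

    public static void main(String[] args) {
        byte[] bytes = new byte[64]; // hypothetical blob content
        for (int i = 0; i < 100_000; i++) {
            int position = between(0, bytes.length - 2);
            int length = between(1, ThreadLocalRandom.current().nextBoolean() ? bytes.length : Integer.MAX_VALUE);
            // The request always asks for at least one byte and always starts
            // strictly before the last byte, so the server has something to return.
            if (length < 1 || position >= bytes.length - 1) {
                throw new AssertionError("bounds violated: position=" + position + " length=" + length);
            }
        }
        System.out.println("bounds hold");
    }
}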
[us-west-2]} + --- "docvalue_fields": - do: diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 57f4fc9a04ecd..2d32d1827cb13 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -6,376 +6,188 @@ * Side Public License, v 1. */ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.HdfsUtils -import java.nio.file.Path - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.OS +import org.elasticsearch.gradle.internal.info.BuildParams -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' - classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' + description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' + classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } versions << [ - 'hadoop': '3.3.3' + 'hadoop': '3.3.3' ] -final int minTestedHadoopVersion = 2; -final int maxTestedHadoopVersion = 3; - -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs" - configurations { - krb5Config - krb5Keytabs + hdfsFixture2 + hdfsFixture3 } -dependencies { - api project(path: 'hadoop-client-api', configuration: 'shadow') - if (isEclipse) { - /* - * Eclipse can't pick up the shadow dependency so we point it at *something* - * so it can compile things. 
- */ - api project(path: 'hadoop-client-api') - } - runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}" - implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}" - api "com.google.protobuf:protobuf-java:${versions.protobuf}" - api "commons-logging:commons-logging:${versions.commonslogging}" - api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" - api 'commons-cli:commons-cli:1.2' - api "commons-codec:commons-codec:${versions.commonscodec}" - api 'commons-io:commons-io:2.8.0' - api 'org.apache.commons:commons-lang3:3.11' - api 'javax.servlet:javax.servlet-api:3.1.0' - api "org.slf4j:slf4j-api:${versions.slf4j}" - runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" - // runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") https://github.com/elastic/elasticsearch/issues/93714 - krb5Keytabs project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5KeytabsHdfsDir') - krb5Config project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5ConfHdfsFile') +dependencies { + api project(path: 'hadoop-client-api', configuration: 'shadow') + if (isEclipse) { + /* + * Eclipse can't pick up the shadow dependency so we point it at *something* + * so it can compile things. + */ + api project(path: 'hadoop-client-api') + } + runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}" + implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}" + api "com.google.protobuf:protobuf-java:${versions.protobuf}" + api "commons-logging:commons-logging:${versions.commonslogging}" + api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + api 'commons-cli:commons-cli:1.2' + api "commons-codec:commons-codec:${versions.commonscodec}" + api 'commons-io:commons-io:2.8.0' + api 'org.apache.commons:commons-lang3:3.11' + api 'javax.servlet:javax.servlet-api:3.1.0' + api "org.slf4j:slf4j-api:${versions.slf4j}" + runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" + // https://github.com/elastic/elasticsearch/issues/93714 + // runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") + + testImplementation(project(':test:fixtures:hdfs-fixture')) + javaRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) + + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + javaRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + + yamlRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) + yamlRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + yamlRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + yamlRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + yamlRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + + hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2') + hdfsFixture3 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadow') } restResources { - restApi { - include '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot' - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } + restApi { + include 
'_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot' + } } tasks.named("dependencyLicenses").configure { - mapping from: /hadoop-.*/, to: 'hadoop' + mapping from: /hadoop-.*/, to: 'hadoop' } -// TODO work that into the java rest test plugin when combined with java plugin -sourceSets { - javaRestTest { - compileClasspath = compileClasspath + main.compileClasspath - runtimeClasspath = runtimeClasspath + main.runtimeClasspath + files("src/main/plugin-metadata") - } +tasks.withType(RestIntegTestTask).configureEach { + usesDefaultDistribution() + BuildParams.withFipsEnabledOnly(it) + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } tasks.named('javaRestTest').configure { - enabled = false + classpath = sourceSets.javaRestTest.runtimeClasspath + configurations.hdfsFixture3 } -tasks.named('yamlRestTest').configure { - enabled = false +tasks.register("javaRestTestHdfs2", RestIntegTestTask) { + description = "Runs rest tests against an elasticsearch cluster with HDFS version 2" + testClassesDirs = sourceSets.javaRestTest.output.classesDirs + classpath = sourceSets.javaRestTest.runtimeClasspath + configurations.hdfsFixture2 } -String realm = "BUILD.ELASTIC.CO" -String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs") - -// Determine HDFS Fixture compatibility for the current build environment. -ext.fixtureSupported = project.provider(() -> HdfsUtils.isHdfsFixtureSupported(project)) - -for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) { - final int hadoopVer = hadoopVersion - - configurations.create("hdfs" + hadoopVersion + "Fixture") - dependencies.add("hdfs" + hadoopVersion + "Fixture", project(':test:fixtures:hdfs' + hadoopVersion + '-fixture')) - - for (String fixtureName : ['hdfs' + hadoopVersion + 'Fixture', 'haHdfs' + hadoopVersion + 'Fixture', 'secureHdfs' + hadoopVersion + 'Fixture', 'secureHaHdfs' + hadoopVersion + 'Fixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - executable = "${BuildParams.runtimeJavaHome}/bin/java" - dependsOn project.configurations.getByName("hdfs" + hadoopVer + "Fixture"), project.configurations.krb5Config, project.configurations.krb5Keytabs - env 'CLASSPATH', "${-> project.configurations.getByName("hdfs" + hadoopVer + "Fixture").asPath}" - - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.startsWith('secure')) { - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${project.configurations.krb5Config.getSingleFile().getPath()}") - miniHDFSArgs.add("-Dhdfs.config.port=" + getSecureNamenodePortForVersion(hadoopVer)) - } else { - miniHDFSArgs.add("-Dhdfs.config.port=" + getNonSecureNamenodePortForVersion(hadoopVer)) - } - // If it's an HA fixture, set a nameservice to use in the JVM options - if (name.startsWith('haHdfs') || name.startsWith('secureHaHdfs')) { - miniHDFSArgs.add("-Dha-nameservice=ha-hdfs") - } - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - - // If it's a secure fixture, then set the principal name and 
keytab locations to use for auth. - if (name.startsWith('secure')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(new File(project.configurations.krb5Keytabs.singleFile, "hdfs_hdfs.build.elastic.co.keytab").getPath()) - } - - args miniHDFSArgs.toArray() - } - } - - for (String integTestTaskName : ['javaRestTest' + hadoopVersion, 'javaRestTestSecure' + hadoopVersion]) { - tasks.register(integTestTaskName, RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer + "-HA" - - if (name.contains("Secure")) { - dependsOn "secureHaHdfs" + hadoopVer + "Fixture" - } - - File portsFileDir = file("${workingDir}/hdfs" + hadoopVer + "Fixture") - Path portsFile = name.contains("Secure") ? - buildDir.toPath() - .resolve("fixtures") - .resolve("secureHaHdfs" + hadoopVer + "Fixture") - .resolve("ports") : - buildDir.toPath() - .resolve("fixtures") - .resolve("haHdfs" + hadoopVer + "Fixture") - .resolve("ports") - nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports") - - // Copy ports file to separate location which is placed on the test classpath - doFirst { - mkdir(portsFileDir) - copy { - from portsFile - into portsFileDir - } - } - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - // Set the keytab files in the classpath so that we can access them from test code without the security manager - // freaking out. - classpath = sourceSets.javaRestTest.runtimeClasspath + - configurations.krb5Keytabs + - files(portsFileDir) - } - } - - for (String integTestTaskName : ['yamlRestTest' + hadoopVersion, 'yamlRestTestSecure' + hadoopVersion]) { - tasks.register(integTestTaskName, RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer - - if (name.contains("Secure")) { - dependsOn "secureHdfs" + hadoopVer + "Fixture" - } - - testClassesDirs = sourceSets.yamlRestTest.output.classesDirs - classpath = sourceSets.yamlRestTest.runtimeClasspath - } - } - - def processHadoopTestResources = tasks.register("processHadoop" + hadoopVer + "TestResources", Copy) - processHadoopTestResources.configure { - Map expansions = [ - 'hdfs_port' : getNonSecureNamenodePortForVersion(hadoopVer), - 'secure_hdfs_port': getSecureNamenodePortForVersion(hadoopVer), - ] - inputs.properties(expansions) - filter("tokens": expansions.collectEntries { k, v -> [k, v.toString()]}, ReplaceTokens.class) - it.into("build/resources/yamlRestTest/rest-api-spec/test") - it.into("hdfs_repository_" + hadoopVer) { - from "src/yamlRestTest/resources/rest-api-spec/test/hdfs_repository" - } - it.into("secure_hdfs_repository_" + hadoopVer) { - from "src/yamlRestTest/resources/rest-api-spec/test/secure_hdfs_repository" - } - } - tasks.named("processYamlRestTestResources").configure { - dependsOn(processHadoopTestResources) - } - - if (fixtureSupported.get()) { - // Check depends on the HA test. Already depends on the standard test. 
- tasks.named("check").configure { - dependsOn("javaRestTest" + hadoopVer) - } - - // Both standard and HA tests depend on their respective HDFS fixtures - tasks.named("yamlRestTest" + hadoopVer).configure { - dependsOn "hdfs" + hadoopVer + "Fixture" - // The normal test runner only runs the standard hdfs rest tests - systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer - } - tasks.named("javaRestTest" + hadoopVer).configure { - dependsOn "haHdfs" + hadoopVer + "Fixture" - } - } else { - // The normal integration test runner will just test that the plugin loads - tasks.named("yamlRestTest" + hadoopVer).configure { - systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer + '/10_basic' - } - // HA fixture is unsupported. Don't run them. - tasks.named("javaRestTestSecure" + hadoopVer).configure { - enabled = false - } - } - - tasks.named("check").configure { - dependsOn("yamlRestTest" + hadoopVer, "yamlRestTestSecure" + hadoopVer, "javaRestTestSecure" + hadoopVer) - } - - // Run just the secure hdfs rest test suite. - tasks.named("yamlRestTestSecure" + hadoopVer).configure { - systemProperty 'tests.rest.suite', 'secure_hdfs_repository_' + hadoopVer - } +tasks.named('yamlRestTest').configure { + classpath = sourceSets.yamlRestTest.runtimeClasspath + configurations.hdfsFixture2 } - -def getSecureNamenodePortForVersion(hadoopVersion) { - return 10002 - (2 * hadoopVersion) +tasks.register("yamlRestTestHdfs2", RestIntegTestTask) { + description = "Runs yaml rest tests against an elasticsearch cluster with HDFS version 2" + testClassesDirs = sourceSets.yamlRestTest.output.classesDirs + classpath = sourceSets.yamlRestTest.runtimeClasspath + configurations.hdfsFixture2 } -def getNonSecureNamenodePortForVersion(hadoopVersion) { - return 10003 - (2 * hadoopVersion) +tasks.named("test").configure { + onlyIf("Not running on windows") { + OS.current().equals(OS.WINDOWS) == false + } } -Set disabledIntegTestTaskNames = [] - -tasks.withType(RestIntegTestTask).configureEach { testTask -> - if (disabledIntegTestTaskNames.contains(name)) { - enabled = false; - } - BuildParams.withFipsEnabledOnly(testTask) - - if (name.contains("Secure")) { - if (disabledIntegTestTaskNames.contains(name) == false) { - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty "java.security.krb5.conf", "${project.configurations.krb5Config.getSingleFile().getPath()}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - new File(project.configurations.krb5Keytabs.singleFile, "hdfs_hdfs.build.elastic.co.keytab").getPath() - ) - } - } - - testClusters.matching { it.name == testTask.name }.configureEach { - if (testTask.name.contains("Secure")) { - systemProperty "java.security.krb5.conf", { configurations.krb5Config.singleFile.getPath() }, IGNORE_VALUE - extraConfigFile( - "repository-hdfs/krb5.keytab", - new File(project.configurations.krb5Keytabs.singleFile, "elasticsearch.keytab"), - IGNORE_VALUE - ) - } - } +tasks.named("check").configure { + dependsOn(tasks.withType(RestIntegTestTask)) } - tasks.named("thirdPartyAudit").configure { - ignoreMissingClasses() - ignoreViolations( - // internal java api: sun.misc.Unsafe - 'com.google.protobuf.MessageSchema', - 'com.google.protobuf.UnsafeUtil', - 'com.google.protobuf.UnsafeUtil$1', - 'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor', - 
'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor', - 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'com.google.protobuf.UnsafeUtil$MemoryAccessor', - 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1', - 
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' - ) -} - -tasks.named('resolveAllDependencies') { - // This avoids spinning up the test fixture when downloading all dependencies - configs = project.configurations - [project.configurations.krb5Config] + ignoreMissingClasses() + ignoreViolations( + // internal java api: sun.misc.Unsafe + 'com.google.protobuf.MessageSchema', + 'com.google.protobuf.UnsafeUtil', + 'com.google.protobuf.UnsafeUtil$1', + 'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor', + 'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor', + 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor', + 'com.google.protobuf.UnsafeUtil$MemoryAccessor', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 
'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell', + 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor', + 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64', + 
'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', + 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' + ) } diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java new file mode 100644 index 0000000000000..d14cff30caef3 --- /dev/null +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Assert; + +import java.io.IOException; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +abstract class AbstractHaHdfsFailoverTestSuiteIT extends ESRestTestCase { + + abstract HdfsFixture getHdfsFixture(); + + String securityCredentials() { + return ""; + } + + public void testHAFailoverWithRepository() throws Exception { + getHdfsFixture().setupHA(); + + RestClient client = client(); + + createRepository(client); + + // Get repository + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + + // Failover the namenode to the second. + getHdfsFixture().failoverHDFS("nn1", "nn2"); + safeSleep(2000); + // Get repository again + response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + } + + private void createRepository(RestClient client) throws IOException { + Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); + request.setJsonEntity(Strings.format(""" + { + "type": "hdfs", + "settings": { + "uri": "hdfs://ha-hdfs/", + "path": "/user/elasticsearch/existing/readonly-repository", + "readonly": "true", + %s + "conf.dfs.nameservices": "ha-hdfs", + "conf.dfs.ha.namenodes.ha-hdfs": "nn1,nn2", + "conf.dfs.namenode.rpc-address.ha-hdfs.nn1": "localhost:%s", + "conf.dfs.namenode.rpc-address.ha-hdfs.nn2": "localhost:%s", + "conf.dfs.client.failover.proxy.provider.ha-hdfs":\ + "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" + } + }""", securityCredentials(), getHdfsFixture().getPort(0), getHdfsFixture().getPort(1))); + Response response = client.performRequest(request); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + } + +} diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java index cb8c4d65d88d6..7bd15ad64582f 100644 --- a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java @@ -8,271 +8,41 @@ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ha.BadFencingConfigurationException; -import org.apache.hadoop.ha.HAServiceProtocol; -import org.apache.hadoop.ha.HAServiceTarget; -import org.apache.hadoop.ha.NodeFencer; -import org.apache.hadoop.ha.ZKFCProtocol; -import org.apache.hadoop.ha.protocolPB.HAServiceProtocolClientSideTranslatorPB; -import org.apache.hadoop.hdfs.tools.DFSHAAdmin; -import org.apache.hadoop.security.SecurityUtil; -import 
org.apache.hadoop.security.UserGroupInformation; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.Assert; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.List; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; /** * Integration test that runs against an HA-Enabled HDFS instance */ -public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) +public class HaHdfsFailoverTestSuiteIT extends AbstractHaHdfsFailoverTestSuiteIT { - public void testHAFailoverWithRepository() throws Exception { - RestClient client = client(); + public static HdfsFixture hdfsFixture = new HdfsFixture().withHAService("ha-hdfs"); - String esKerberosPrincipal = System.getProperty("test.krb5.principal.es"); - String hdfsKerberosPrincipal = System.getProperty("test.krb5.principal.hdfs"); - String kerberosKeytabLocation = System.getProperty("test.krb5.keytab.hdfs"); - String ports = System.getProperty("test.hdfs-fixture.ports"); - String nn1Port = "10001"; - String nn2Port = "10002"; - if (ports.length() > 0) { - final Path path = PathUtils.get(ports); - final List lines = AccessController.doPrivileged((PrivilegedExceptionAction>) () -> { - return Files.readAllLines(path); - }); - nn1Port = lines.get(0); - nn2Port = lines.get(1); - } - boolean securityEnabled = hdfsKerberosPrincipal != null; + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); - Configuration hdfsConfiguration = new Configuration(); - hdfsConfiguration.set("dfs.nameservices", "ha-hdfs"); - hdfsConfiguration.set("dfs.ha.namenodes.ha-hdfs", "nn1,nn2"); - hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn1", "localhost:" + nn1Port); - hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn2", "localhost:" + nn2Port); - hdfsConfiguration.set( - "dfs.client.failover.proxy.provider.ha-hdfs", - "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" - ); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); - AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - if (securityEnabled) { - // ensure that keytab exists - Path kt = PathUtils.get(kerberosKeytabLocation); - if (Files.exists(kt) == false) { - throw new IllegalStateException("Could not locate keytab at " + kerberosKeytabLocation); - } - if (Files.isReadable(kt) != true) { - throw new IllegalStateException("Could not read keytab at " + kerberosKeytabLocation); - } - 
logger.info("Keytab Length: " + Files.readAllBytes(kt).length); - - // set principal names - hdfsConfiguration.set("dfs.namenode.kerberos.principal", hdfsKerberosPrincipal); - hdfsConfiguration.set("dfs.datanode.kerberos.principal", hdfsKerberosPrincipal); - hdfsConfiguration.set("dfs.data.transfer.protection", "authentication"); - - SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, hdfsConfiguration); - UserGroupInformation.setConfiguration(hdfsConfiguration); - UserGroupInformation.loginUserFromKeytab(hdfsKerberosPrincipal, kerberosKeytabLocation); - } else { - SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE, hdfsConfiguration); - UserGroupInformation.setConfiguration(hdfsConfiguration); - UserGroupInformation.getCurrentUser(); - } - return null; - }); - - // Create repository - { - Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); - request.setJsonEntity(Strings.format(""" - { - "type": "hdfs", - "settings": { - "uri": "hdfs://ha-hdfs/", - "path": "/user/elasticsearch/existing/readonly-repository", - "readonly": "true", - %s - "conf.dfs.nameservices": "ha-hdfs", - "conf.dfs.ha.namenodes.ha-hdfs": "nn1,nn2", - "conf.dfs.namenode.rpc-address.ha-hdfs.nn1": "localhost:%s", - "conf.dfs.namenode.rpc-address.ha-hdfs.nn2": "localhost:%s", - "conf.dfs.client.failover.proxy.provider.ha-hdfs": \ - "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" - } - }""", securityCredentials(securityEnabled, esKerberosPrincipal), nn1Port, nn2Port)); - Response response = client.performRequest(request); - - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - - // Get repository - { - Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - - // Failover the namenode to the second. - failoverHDFS("nn1", "nn2", hdfsConfiguration); - - // Get repository again - { - Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - } - - private String securityCredentials(boolean securityEnabled, String kerberosPrincipal) { - if (securityEnabled) { - return String.format(java.util.Locale.ROOT, """ - "security.principal": "%s","conf.dfs.data.transfer.protection": "authentication",""", kerberosPrincipal); - } else { - return ""; - } - } - - /** - * Wraps an HAServiceTarget, keeping track of any HAServiceProtocol proxies it generates in order - * to close them at the end of the test lifecycle. 
- */ - private static class CloseableHAServiceTarget extends HAServiceTarget { - private final HAServiceTarget delegate; - private final List protocolsToClose = new ArrayList<>(); - - CloseableHAServiceTarget(HAServiceTarget delegate) { - this.delegate = delegate; - } - - @Override - public InetSocketAddress getAddress() { - return delegate.getAddress(); - } - - @Override - public InetSocketAddress getHealthMonitorAddress() { - return delegate.getHealthMonitorAddress(); - } - - @Override - public InetSocketAddress getZKFCAddress() { - return delegate.getZKFCAddress(); - } - - @Override - public NodeFencer getFencer() { - return delegate.getFencer(); - } - - @Override - public void checkFencingConfigured() throws BadFencingConfigurationException { - delegate.checkFencingConfigured(); - } - - @Override - public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) throws IOException { - HAServiceProtocol proxy = delegate.getProxy(conf, timeoutMs); - protocolsToClose.add(proxy); - return proxy; - } - - @Override - public HAServiceProtocol getHealthMonitorProxy(Configuration conf, int timeoutMs) throws IOException { - return delegate.getHealthMonitorProxy(conf, timeoutMs); - } - - @Override - public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs) throws IOException { - return delegate.getZKFCProxy(conf, timeoutMs); - } - - @Override - public boolean isAutoFailoverEnabled() { - return delegate.isAutoFailoverEnabled(); - } - - private void close() { - for (HAServiceProtocol protocol : protocolsToClose) { - if (protocol instanceof HAServiceProtocolClientSideTranslatorPB haServiceProtocolClientSideTranslatorPB) { - haServiceProtocolClientSideTranslatorPB.close(); - } - } - } - } - - /** - * The default HAAdmin tool does not throw exceptions on failures, and does not close any client connection - * resources when it concludes. This subclass overrides the tool to allow for exception throwing, and to - * keep track of and clean up connection resources. - */ - private static class CloseableHAAdmin extends DFSHAAdmin { - private final List serviceTargets = new ArrayList<>(); - - @Override - protected HAServiceTarget resolveTarget(String nnId) { - CloseableHAServiceTarget target = new CloseableHAServiceTarget(super.resolveTarget(nnId)); - serviceTargets.add(target); - return target; - } - - @Override - public int run(String[] argv) throws Exception { - return runCmd(argv); - } - - public int transitionToStandby(String namenodeID) throws Exception { - return run(new String[] { "-transitionToStandby", namenodeID }); - } - - public int transitionToActive(String namenodeID) throws Exception { - return run(new String[] { "-transitionToActive", namenodeID }); - } - - public void close() { - for (CloseableHAServiceTarget serviceTarget : serviceTargets) { - serviceTarget.close(); - } - } + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); } - /** - * Performs a two-phase leading namenode transition. - * @param from Namenode ID to transition to standby - * @param to Namenode ID to transition to active - * @param configuration Client configuration for HAAdmin tool - * @throws IOException In the event of a raised exception during namenode failover. 
- */ - private void failoverHDFS(String from, String to, Configuration configuration) throws IOException { - logger.info("Swapping active namenodes: [{}] to standby and [{}] to active", from, to); - try { - AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - CloseableHAAdmin haAdmin = new CloseableHAAdmin(); - haAdmin.setConf(configuration); - try { - haAdmin.transitionToStandby(from); - haAdmin.transitionToActive(to); - } finally { - haAdmin.close(); - } - return null; - }); - } catch (PrivilegedActionException pae) { - throw new IOException("Unable to perform namenode failover", pae); - } + @Override + HdfsFixture getHdfsFixture() { + return hdfsFixture; } } diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java new file mode 100644 index 0000000000000..8ba27f703c419 --- /dev/null +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.repositories.hdfs; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +public class SecureHaHdfsFailoverTestSuiteIT extends AbstractHaHdfsFailoverTestSuiteIT { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withHAService("ha-hdfs") + .withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + @Override + HdfsFixture getHdfsFixture() { + return hdfsFixture; + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected String securityCredentials() { + return String.format(java.util.Locale.ROOT, """ + "security.principal": "%s","conf.dfs.data.transfer.protection": "authentication",""", krb5Fixture.getEsPrincipal()); + } + +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java 
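The two failover suites above lean on JUnit's RuleChain to sequence their class-level fixtures: the outer rule starts first and stops last, so the KDC outlives the HDFS fixture, which in turn outlives the Elasticsearch cluster. A minimal, self-contained illustration of that ordering with hypothetical stand-in fixtures (plain JUnit 4, no Elasticsearch test classes):

import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;

public class RuleChainOrderingSketch {

    // Hypothetical stand-ins for the krb5 / HDFS fixtures used above.
    private static ExternalResource named(String name) {
        return new ExternalResource() {
            @Override
            protected void before() {
                System.out.println(name + " starting");
            }

            @Override
            protected void after() {
                System.out.println(name + " stopping");
            }
        };
    }

    private static final ExternalResource KDC = named("krb5kdc");
    private static final ExternalResource HDFS = named("hdfs");

    // The outer rule is started first and torn down last, so the KDC is up for
    // the whole lifetime of the HDFS fixture, mirroring the RuleChain above.
    @ClassRule
    public static final TestRule CHAIN = RuleChain.outerRule(KDC).around(HDFS);

    @Test
    public void printsStartupAndShutdownOrder() {
        System.out.println("test body");
        // Expected output: krb5kdc starting, hdfs starting, test body, hdfs stopping, krb5kdc stopping
    }
}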
b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index 592192f29c262..ee1e54e8a3356 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import org.hamcrest.CoreMatchers; import org.mockito.AdditionalMatchers; import org.mockito.Mockito; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java index fed4411f68768..a52724496289a 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; import java.util.Collections; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java index cd38cc04e6b31..a6d2bdcf8a1d4 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.AbstractThirdPartyRepositoryTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index 313dcdd6623c4..0e2ec25b6cfaa 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; diff --git a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java index bdc6368bb5719..a0a4d9379bc78 100644 --- a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java +++ b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java @@ -5,22 +5,52 @@ 
* in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ + package org.elasticsearch.repositories.hdfs; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Map; +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) public class RepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); + public RepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @ParametersFactory public static Iterable<Object[]> parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); + return createParameters(new String[] { "hdfs_repository" }, Map.of("hdfs_port", hdfsFixture.getPort())); } } diff --git a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java new file mode 100644 index 0000000000000..45b992a35d731 --- /dev/null +++ b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Map; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class SecureRepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + public SecureRepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() throws Exception { + return createParameters(new String[] { "secure_hdfs_repository" }, Map.of("secure_hdfs_port", hdfsFixture.getPort())); + } +} diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 303b37210535d..36fac5fe89ab5 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -289,15 +289,15 @@ protected void dumpDebug() { protected void assertWhileRunning(Platforms.PlatformAction assertions) throws Exception { try { awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false)); - } catch (Exception e) { + } catch (AssertionError | Exception e) { dumpDebug(); throw e; } try { assertions.run(); - } catch (Exception e) { - logger.warn("Elasticsearch log:\n" + FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); + } catch (AssertionError | Exception e) { + dumpDebug(); throw e; } stopElasticsearch(); @@ -392,15 +392,8 @@ public Shell.Result 
awaitElasticsearchStartupWithResult(Shell.Result result) thr public void startElasticsearch() throws Exception { try { awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false)); - } catch (Exception e) { - if (Files.exists(installation.home.resolve("elasticsearch.pid"))) { - String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim(); - logger.info("elasticsearch process ({}) failed to start", pid); - if (sh.run("jps").stdout().contains(pid)) { - logger.info("Dumping jstack of elasticsearch process ({}) ", pid); - sh.runIgnoreExitCode("jstack " + pid); - } - } + } catch (AssertionError | Exception e) { + dumpDebug(); throw e; } } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index 88d910b61fa52..b12a70ccb8425 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -524,7 +525,7 @@ private static GetSnapshotsResponse sortedWithLimit( static { GET_SNAPSHOT_PARSER.declareObjectArray( ConstructingObjectParser.constructorArg(), - (p, c) -> SnapshotInfo.SNAPSHOT_INFO_PARSER.apply(p, c).build(), + (p, c) -> SnapshotInfoUtils.snapshotInfoFromXContent(p), new ParseField("snapshots") ); GET_SNAPSHOT_PARSER.declareObject( diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json index 5b58a7b5b59a5..cc989bfec8a8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json @@ -1,7 +1,7 @@ { "connector.update_api_key_id": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-api-key-id-api.html", "description": "Updates the API key id and/or API key secret id fields in the connector document." 
}, "stability": "experimental", diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml index 0d5455ca22317..2fde1f48e93df 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml @@ -59,8 +59,8 @@ --- "Empty _id with op_type create": - - skip: - version: " - 7.4.99" + - requires: + cluster_features: ["bulk_auto_id"] reason: "auto id + op type create only supported since 7.5" - do: @@ -119,8 +119,8 @@ --- "When setting require_alias flag per request": - - skip: - version: " - 7.9.99" + - requires: + cluster_features: ["bulk_require_alias"] reason: "require_alias flag was added in version 7.10" - do: @@ -162,8 +162,8 @@ index: new_index_not_created --- "When setting require_alias flag": - - skip: - version: " - 7.9.99" + - requires: + cluster_features: ["bulk_require_alias"] reason: "require_alias flag was added in version 7.10" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml index 348d7a6fd0ef1..6e9502fb32f95 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml @@ -1,8 +1,8 @@ --- "Dynamic templates": - - skip: - features: contains - version: " - 8.7.99" + - requires: + test_runner_features: ["contains"] + cluster_features: ["bulk_dynamic_template_document_parse_exception"] reason: "Exception type has changed in 8.8.0" - do: @@ -175,8 +175,8 @@ --- "Dynamic templates with op_type": - - skip: - version: " - 8.6.0" + - requires: + cluster_features: ["bulk_dynamic_template_op_type"] reason: "bug fixed in 8.6.1" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml index ba34604231268..3e919c5960278 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml @@ -1,7 +1,7 @@ --- "One request has pipeline and another not": - - skip: - version: " - 7.9.0" + - requires: + cluster_features: ["bulk_pipeline_validate"] reason: "fixed in 7.9.1" - do: bulk: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml index 2d006f3425790..981a934a719ca 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml @@ -72,7 +72,7 @@ "Test cat indices output for closed index (pre 7.2.0)": - skip: reason: "closed indices are replicated starting version 7.2.0" - cluster_features: ["cat_indices_replicate_closed"] + cluster_features: ["indices_replicate_closed"] - requires: test_runner_features: ["allowed_warnings"] @@ -117,7 +117,7 @@ "Test cat indices output for closed index": - skip: reason: "closed indices are replicated starting version 7.2.0" - cluster_features: ["cat_indices_replicate_closed"] + cluster_features: ["indices_replicate_closed"] - requires: test_runner_features: 
["allowed_warnings"] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml index f6f20913e402b..5270d215f8cea 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -1,7 +1,7 @@ --- "Help": - requires: - cluster_features: ["cat_templates_v2"] + cluster_features: ["templates_v2"] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -32,7 +32,7 @@ --- "Normal templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -83,7 +83,7 @@ --- "Filtered templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -125,7 +125,7 @@ --- "Column headers": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -163,7 +163,7 @@ --- "Select columns": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -197,7 +197,7 @@ --- "Sort templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -250,7 +250,7 @@ --- "Multiple template": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -286,7 +286,7 @@ --- "Mixture of legacy and composable templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: allowed_warnings diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml index 1f0e2b6fd727c..8c350b50a6bf2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml @@ -49,10 +49,10 @@ --- "Cluster shard allocation explanation test with a closed index": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["indices_replicate_closed"] reason: closed indices are replicated starting version 7.2.0 - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: indices.create: @@ -95,8 +95,8 @@ --- "Cluster allocation explanation response includes node's roles": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["cluster_allocation_role"] reason: The roles field was introduced in 8.11.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index d27abc3da7081..0308a68dae2cd 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -1,7 +1,7 @@ --- "Basic CRUD": - - skip: - version: " - 7.7.99" + - requires: + cluster_features: ["templates_v2"] reason: "index/component template v2 API unavailable before 7.8" - do: @@ -48,8 +48,8 @@ --- "Delete multiple templates": - - skip: - version: " - 7.99.99" + - requires: + cluster_features: ["cluster_templates_delete_multiple"] reason: "not yet backported" - do: @@ -116,8 +116,8 @@ --- "Add data stream lifecycle": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["datastream_lifecycle"] reason: "Data stream lifecycle was available from 8.11" - do: @@ -144,8 +144,8 @@ --- "Get data stream lifecycle with default rollover": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["datastream_lifecycle"] reason: "Data stream lifecycle was available from 8.11" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index a4204034bfd80..cd213ebe72a8e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: " - 8.5.99" + - requires: + cluster_features: ["cluster_desired_balance"] reason: "API added in in 8.6.0" --- @@ -61,8 +61,8 @@ setup: --- "Test cluster_balance_stats": - - skip: - version: " - 8.6.99" + - requires: + cluster_features: ["cluster_desired_balance_stats"] reason: "cluster_balance_stats added in in 8.7.0" - do: @@ -109,8 +109,8 @@ setup: --- "Test cluster_info": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_info"] reason: "cluster_info added in in 8.8.0" - do: @@ -121,8 +121,8 @@ setup: --- "Test cluster_balance_stats contains node ID and roles": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "node_id and roles added in in 8.8.0" - do: @@ -140,8 +140,8 @@ setup: --- "Test tier_preference": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "tier_preference added in in 8.8.0" - do: @@ -165,8 +165,8 @@ setup: --- "Test computed_shard_movements": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "computed_shard_movements added in in 8.8.0" - do: @@ -177,8 +177,8 @@ setup: --- "Test reset desired balance": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "reset API added in in 8.8.0" - do: @@ -187,8 +187,8 @@ setup: --- "Test undesired_shard_allocation_count": - - skip: - version: " - 8.11.99" + - requires: + cluster_features: ["cluster_desired_balance_stats_undesired_count"] reason: "undesired_shard_allocation_count added in in 8.12.0" - do: @@ -225,8 +225,8 @@ setup: --- "Test unassigned_shards, total_allocations, undesired_allocations and undesired_allocations_fraction": - - skip: - version: " - 8.11.99" + - requires: + cluster_features: [ "cluster_desired_balance_stats_undesired_count" ] reason: "undesired_shard_allocation_count added in in 8.12.0" - do: diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml index 3723c5b2165ca..56a693febec82 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: " - 8.3.99" + - requires: + cluster_features: ["cluster_desired_nodes_dry_run"] reason: "Support for the dry run option was added in in 8.4.0" --- teardown: @@ -12,7 +12,7 @@ teardown: "Test dry run doesn't update empty desired nodes": - skip: version: " - 8.12.99" - reason: "version_node removed from version 8.13 onwards" + reason: "version_node removed from version 8.13 onwards, require the new api" - do: cluster.state: {} @@ -42,7 +42,7 @@ teardown: "Test dry run doesn't update existing desired nodes": - skip: version: " - 8.12.99" - reason: "version_node removed from version 8.13 onwards" + reason: "version_node removed from version 8.13 onwards, require the new api" - do: cluster.state: {} @@ -94,7 +94,7 @@ teardown: --- "Test validation works for dry run updates": - skip: - version: "8.9.99 - " + cluster_features: ["cluster_desired_nodes_no_settings_validation"] reason: "We started skipping setting validations in 8.10" - do: cluster.state: { } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml index a051b3626b217..586bd3b7cfb6b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml @@ -132,9 +132,10 @@ --- "cluster health with closed index (pre 7.2.0)": - skip: - version: "7.2.0 - " + cluster_features: ["indices_replicate_closed"] reason: "closed indices are replicated starting version 7.2.0" - features: ["allowed_warnings"] + - requires: + test_runner_features: ["allowed_warnings"] - do: indices.create: @@ -208,10 +209,10 @@ --- "cluster health with closed index": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["indices_replicate_closed"] reason: "closed indices are replicated starting version 7.2.0" - features: ["allowed_warnings", "default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml index 8144a2c1a1dbf..5caa0ebad30b2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml @@ -34,8 +34,8 @@ setup: --- "cluster health with expand_wildcards": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["cluster_health_indices_options"] reason: "indices options has been introduced in cluster health request starting version 7.2.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml index 4e88c14a2b1dc..023e35cd4bee1 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/_all only available from v8.9" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml index f238e5116e146..ff7d982b14fee 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/http only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml index 35a538ac3a44a..261f1d8ea56cb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/thread_pool only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml index 8c4b3e2832daf..bb7597537014e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/script only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml index 740836efcdc46..fda715e416ac2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml @@ -1,8 +1,8 @@ --- "Prevalidation basic test": - - skip: - features: contains - version: "- 8.6.99" + - requires: + test_runner_features: ["contains"] + cluster_features: ["cluster_prevalidate_node_removal_reason"] reason: "The reason field was introduced in 8.7.0" # Fetch a node ID and stash it in node_id @@ -19,8 +19,8 @@ - contains: {nodes: {id: "$node_id", result: {is_safe: true, reason: no_problems, message: ""}}} --- "Prevalidation with no node specified": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -31,8 +31,8 @@ - match: { error.reason: "Validation Failed: 1: request must contain one of the parameters 'names', 'ids', or 'external_ids';" } --- "Prevalidation with more than one 
query parameter": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -45,8 +45,8 @@ - match: { error.reason: "Validation Failed: 1: request must contain only one of the parameters 'names', 'ids', or 'external_ids';" } --- "Prevalidation with non-existing node": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: missing diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml index a46d9a67a863d..2c48282332909 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml @@ -75,8 +75,8 @@ --- "get cluster stats returns packaging types": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["cluster_stats_packaging_types"] reason: "packaging types are added for v7.2.0" - do: @@ -85,8 +85,8 @@ - is_true: nodes.packaging_types --- "get cluster stats without runtime fields": - - skip: - version: " - 7.12.99" + - requires: + cluster_features: ["cluster_stats_runtime_fields"] reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -98,8 +98,8 @@ --- "Usage stats with script-less runtime fields": - - skip: - version: " - 7.12.99" + - requires: + cluster_features: ["cluster_stats_runtime_fields"] reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -168,8 +168,8 @@ --- "mappings sizes reported in get cluster stats": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: ["cluster_stats_mapping_sizes"] reason: "mapping sizes reported from 8.4 onwards" - do: indices.create: @@ -188,8 +188,8 @@ --- "snapshot stats reported in get cluster stats": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_stats_snapshots"] reason: "snapshot stats reported from 8.8 onwards" - do: @@ -231,8 +231,8 @@ --- "Dense vector stats": - - skip: - version: " - 8.9.99" + - requires: + cluster_features: ["cluster_stats_dense_vectors"] reason: "dense vector stats added in 8.10" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml index 9178fab25e230..648964d9e721f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml @@ -1,7 +1,7 @@ --- "Indexing pressure cluster stats": - - skip: - version: " - 8.0.99" + - requires: + cluster_features: ["cluster_stats_indexing_pressure"] reason: "indexing_pressure in cluster was added in 8.1" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml index 278a7095add5e..8bbfb5cff7ed3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml @@ -3,6 +3,9 @@ - do: indices.create: index: test_count_min_score + body: + settings: + 
number_of_shards: 1 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml new file mode 100644 index 0000000000000..cd3751dbb9653 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml @@ -0,0 +1,72 @@ +setup: + - skip: + version: ' - 8.13.99' + reason: 'check of preTags and postTags params for empty values was added in 8.14' + + - do: + indices.create: + index: test + body: + mappings: + "properties": + "text": + "type": "text" + "term_vector": "with_positions_offsets" + + - do: + index: + index: test + id: "1" + body: + "text" : "The quick brown fox is brown." + - do: + indices.refresh: {} + +--- +"Test with empty pre_tags or post_tags in query body with unified highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "text": "fox" } }, + "highlight": { + "type": "unified", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + +--- +"Test with empty pre_tags or post_tags in query body with plain highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "text": "fox" } }, + "highlight": { + "type": "plain", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + +--- +"Test with empty pre_tags or post_tags in query body with fvh highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "text": "fox" } }, + "highlight": { + "type": "fvh", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 5194c95151eda..9ff6319a01af4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -6,6 +6,8 @@ setup: indices.create: index: knn_query_test_index body: + settings: + number_of_shards: 1 mappings: properties: vector: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml new file mode 100644 index 0000000000000..355ffeebfb1d3 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml @@ -0,0 +1,62 @@ +setup: + - skip: + version: ' - 8.13.99' + reason: 'no trimming highlight snippets when number_of_fragments is 0 was introduced in 8.14' + - do: + indices.create: + index: test_trim + body: + mappings: + properties: + text: + type: text + analyzer: whitespace + + - do: + bulk: + refresh: true + body: + - index: + _index: test_trim + _id: 1 + - text: " a b c d " + +--- +"Test unified highlighter with custom passage formatter and number_of_fragments > 0": + - do: + search: + index: test_trim + body: + query: + match: + text: "c" + highlight: + type: unified + number_of_fragments: 1 + fields: + text: + pre_tags: ["("] + post_tags: [")"] + + - 
match: { hits.total.value: 1 } + - match: { hits.hits.0.highlight.text: ["a b (c) d"] } + +--- +"Test unified highlighter with custom passage formatter when number_of_fragments = 0": + - do: + search: + index: test_trim + body: + query: + match: + text: "c" + highlight: + type: unified + number_of_fragments: 0 + fields: + text: + pre_tags: ["("] + post_tags: [")"] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.highlight.text: [" a b (c) d "] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml new file mode 100644 index 0000000000000..b2f35fe724410 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml @@ -0,0 +1,290 @@ +setup: + - skip: + version: " - 8.13.99" + reason: fixed in 8.14.0 + - do: + indices.create: + index: range_query_test_index + body: + mappings: + properties: + half_float_field: + type: half_float + float_field: + type: float + keyword_field: + type: keyword + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "min_boundary_doc" } }' + - '{"half_float_field" : -65504, "float_field" : -3.4028235E38 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "max_boundary_doc" } }' + - '{"half_float_field" : 65504, "float_field" : 3.4028235E38 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "1" } }' + - '{"half_float_field" : -1, "float_field" : -1 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "2" } }' + - '{"half_float_field" : 1, "float_field" : 1 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "3" } }' + - '{"keyword": "I am missing the half_float/float fields and should not be part of the results" }' + +--- +"Test range query for half_float field with out of bounds upper limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 1e+300 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for float field with out of bounds upper limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 1e+300 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for half_float field with out of bounds lower limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gte: -1e+300 + lt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with out of bounds lower limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gte: -1e+300 + lt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater or equal than half float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gte: -65504 + lt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: 
"min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater or equal than float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gte: -3.4028235E38 + lt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater than half float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gt: -65504 + lt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"Test range query for float field with greater than float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gt: -3.4028235E38 + lt: 0 + sort: float_field + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"Test range query for half_float field with lower or equal than half float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 65504 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for float field with lower or equal than float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 3.4028235E38 + gt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for half_float field with lower than half float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lt: 65504 + gt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "2" } + +--- +"Test range query for float field with lower than float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lt: 3.4028235E38 + gt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "2" } + +--- +"Test range query for half float field with lt and gt limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lt: 1 + gt: -1 + + - length: { hits.hits: 0 } + +--- +"Test range query for float field with lt and gt limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lt: 1 + gt: -1 + + - length: { hits.hits: 0 } + +--- +"Test range query for half_float field with gte and lte limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 1 + gte: -1 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + +--- +"Test range query for float field with gte and lte limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 1 + gte: -1 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml index 5813445326ef6..2ee2391458b03 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml @@ -90,11 +90,11 @@ missing routing path field: --- missing dimension on routing path field: - skip: - version: " - 8.7.99" - reason: error message changed in 8.8.0 + version: " - 8.13.99" + reason: error message changed in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[tag\] was not a dimension./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[tag\] was not a dimension./' indices.create: index: test body: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 1ff32192b9e08..807f60a0faf35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -121,11 +121,11 @@ top level wildcard dim object: --- exact match object type: - skip: - version: " - 8.7.99" - reason: routing_path error message updated in 8.8.0 + version: " - 8.13.99" + reason: routing_path error message updated in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[dim\] was \[object\]./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[dim\] was \[object\]./' indices.create: index: tsdb_index body: @@ -154,11 +154,11 @@ exact match object type: --- non keyword matches routing_path: - skip: - version: " - 8.7.99" - reason: routing_path error message updated in 8.8.0 + version: " - 8.13.99" + reason: routing_path error message updated in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[@timestamp\] was \[date\]./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[@timestamp\] was not a dimension./' indices.create: index: test_index body: @@ -273,7 +273,7 @@ runtime field matching routing path: body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim_kw": "dim", "dim": {"foo": "a"}, "extra_field": 100}' - - match: {items.0.index.error.reason: "All fields that match routing_path must be keywords with [time_series_dimension: true] or flattened fields with a list of dimensions in [time_series_dimensions] and without the [script] parameter. 
[dim.foo] was a runtime [keyword]."} + - match: {items.0.index.error.reason: "All fields that match routing_path must be configured with [time_series_dimension: true] or flattened fields with a list of dimensions in [time_series_dimensions] and without the [script] parameter. [dim.foo] was a runtime [keyword]."} --- "dynamic: false matches routing_path": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 6ef03ba8ebcc4..621906820e4ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -1,12 +1,12 @@ --- setup: - skip: - version: "- 8.12.99" - reason: _tsid hashing introduced in 8.13 + version: "- 8.13.99" + reason: _tsid hashing introduced in 8.13 and tsid routing changed in 8.14 - do: indices.create: - index: test + index: id_generation_test body: settings: index: @@ -44,7 +44,7 @@ setup: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' @@ -65,14 +65,10 @@ setup: --- generates a consistent id: - - skip: - version: " - 8.1.99" - reason: ID generation added in 8.2 - - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:52:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' @@ -81,23 +77,46 @@ generates a consistent id: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:52:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' - match: {items.0.index._id: cZZNs7B9sSWsyrL5AAABeRnS7fM} + - do: + get: + index: id_generation_test + id: cn4excfoxSs_KdA5AAABeRnRFAY + - match: {_index: id_generation_test} + - match: {_id: cn4excfoxSs_KdA5AAABeRnRFAY} + - match: + _source: + "@timestamp": "2021-04-28T18:50:03.142Z" + metricset: pod + k8s: + pod: + name: dog + uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 + ip: 10.10.55.3 + network: + tx: 1434521831 + rx: 530575198 + + - do: search: - index: test + index: id_generation_test body: query: match_all: {} sort: ["@timestamp"] + _source: true + docvalue_fields: [_ts_routing_hash] - match: {hits.total.value: 9} - match: { hits.hits.0._id: cn4excfoxSs_KdA5AAABeRnRFAY } + - match: { hits.hits.0.fields._ts_routing_hash: [ cn4exQ ] } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:50:03.142Z } - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } @@ -135,13 +154,9 @@ generates a consistent id: --- index a new document on top of an old one: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -157,7 +172,7 @@ index a new document on top of an old one: - do: index: refresh: true - index: test + index: id_generation_test op_type: index body: "@timestamp": "2021-04-28T18:51:03.142Z" @@ -174,7 +189,7 @@ index a 
new document on top of an old one: - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -189,13 +204,9 @@ index a new document on top of an old one: --- index a new document on top of an old one over bulk: - - skip: - version: " - 8.1.99" - reason: indexing on top of another document support added in 8.2 - - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -211,7 +222,7 @@ index a new document on top of an old one over bulk: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 111434595272, "rx": 430605511}}}}' @@ -219,7 +230,7 @@ index a new document on top of an old one over bulk: - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -234,15 +245,11 @@ index a new document on top of an old one over bulk: --- create operation on top of old document fails: - - skip: - version: " - 8.1.99" - reason: id generation changed in 8.2 - - do: catch: "/\\[cn4excfoxSs_KdA5AAABeRnR_mY\\]\\[.*@2021-04-28T18:51:03.142Z\\]: version conflict, document already exists \\(current version \\[1\\]\\)/" index: refresh: true - index: test + index: id_generation_test body: "@timestamp": "2021-04-28T18:51:03.142Z" metricset: pod @@ -257,14 +264,10 @@ create operation on top of old document fails: --- create operation on top of old document fails over bulk: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"create": {}}' - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 111434595272, "rx": 430605511}}}}' @@ -272,13 +275,9 @@ create operation on top of old document fails over bulk: --- ids query: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: search: - index: test + index: id_generation_test body: fields: - field: k8s.pod.network.tx @@ -294,15 +293,11 @@ ids query: --- get: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: get: - index: test + index: id_generation_test id: cZZNs7B9sSWsyrL5AAABeRnSA5M - - match: {_index: test} + - match: {_index: id_generation_test} - match: {_id: cZZNs7B9sSWsyrL5AAABeRnSA5M} - match: _source: @@ -319,89 +314,65 @@ get: --- get not found: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: missing get: - index: test + index: id_generation_test id: not found --- get with routing: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: bad_request get: - index: test + index: id_generation_test id: cZZNs-xII2fZweptAAABeRnSA5M routing: routing --- delete: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: delete: - index: test + index: id_generation_test id: cn4excfoxSs_KdA5AAABeRnR_mY - match: {result: deleted} --- delete not found: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: missing delete: - index: test + index: id_generation_test id: not found --- delete with routing: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: bad_request delete: - index: test + index: id_generation_test id: not found routing: 
routing --- delete over _bulk: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - # mget call added to investigate test failure: https://github.com/elastic/elasticsearch/issues/93852 # (should be removed when test issue is resolved) - do: mget: - index: test + index: id_generation_test body: ids: [ cn4excfoxSs_KdA5AAABeRnR_mY, cn4excfoxSs_KdA5AAABeRnR11Y ] - - match: { docs.0._index: "test" } + - match: { docs.0._index: "id_generation_test" } - match: { docs.0._id: "cn4excfoxSs_KdA5AAABeRnR_mY" } - match: { docs.0.found: true } - - match: { docs.1._index: "test" } + - match: { docs.1._index: "id_generation_test" } - match: { docs.1._id: "cn4excfoxSs_KdA5AAABeRnR11Y" } - match: { docs.1.found: true } - do: bulk: - index: test + index: id_generation_test body: - '{"delete": {"_id": "cn4excfoxSs_KdA5AAABeRnR_mY"}}' - '{"delete": {"_id": "cn4excfoxSs_KdA5AAABeRnR11Y"}}' @@ -409,17 +380,13 @@ delete over _bulk: - match: {items.0.delete.result: deleted} - match: {items.1.delete.result: deleted} - match: {items.2.delete.status: 404} - - match: {items.2.delete.error.reason: "invalid id [not found ++ not found] for index [test] in time series mode"} + - match: {items.2.delete.error.reason: "invalid id [not found ++ not found] for index [id_generation_test] in time series mode"} --- routing_path matches deep object: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: indices.create: - index: test2 + index: routing_path_test body: settings: index: @@ -449,7 +416,7 @@ routing_path matches deep object: - do: bulk: refresh: true - index: test2 + index: routing_path_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"bar": {"baz": {"uid": "uid1"}}}}}' @@ -458,13 +425,9 @@ routing_path matches deep object: --- routing_path matches object: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: indices.create: - index: test2 + index: routing_path_test_2 body: settings: index: @@ -490,7 +453,7 @@ routing_path matches object: - do: bulk: refresh: true - index: test2 + index: routing_path_test_2 body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"uid": "uid1"}}}' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index 049b9670b6b46..57ad446eaf637 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -278,6 +278,7 @@ synthetic source text field: type: keyword name: type: text + store: false value: type: long time_series_metric: gauge diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 8a2071584b4a0..1b2d66fc12c76 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -49,6 +49,7 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.avg; @@ -1287,4 +1288,92 @@ public void testScriptWithValueType() throws Exception { assertThat(ex.getCause().getMessage(), containsString("Unknown value type [foobar]")); } } + + public void testOrderByKey() throws Exception { + Map data = new HashMap<>(); + for (int i = 0; i < 5; i++) { + assertAcked( + indicesAdmin().prepareCreate("idx" + i).setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", "filter", "type=boolean") + ); + List builders = new ArrayList<>(); + for (int j = 0; j < 100; j++) { + String val = "val" + random().nextInt(1000); + boolean filter = randomBoolean(); + long[] counter = data.computeIfAbsent(val, s -> new long[] { 0 }); + if (filter == false) { + counter[0]++; + } + builders.add( + prepareIndex("idx" + i).setSource( + jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, val).field("filter", filter).endObject() + ) + ); + } + indexRandom(true, builders); + } + List allKeys = new ArrayList<>(data.keySet()); + List keysMinDocCount1 = allKeys.stream().filter(key -> data.get(key)[0] > 0).collect(Collectors.toList()); + List keysMinDocCount2 = allKeys.stream().filter(key -> data.get(key)[0] > 1).collect(Collectors.toList()); + // test for different batch sizes to exercise partial reduces + for (int batchReduceSize = 2; batchReduceSize < 6; batchReduceSize++) { + // with min_doc_count = 0 + allKeys.sort(String::compareTo); + assertOrderByKeyResponse(allKeys, data, true, 0, batchReduceSize); + Collections.reverse(allKeys); + assertOrderByKeyResponse(allKeys, data, false, 0, batchReduceSize); + // with min_doc_count = 1 + keysMinDocCount1.sort(String::compareTo); + assertOrderByKeyResponse(keysMinDocCount1, data, true, 1, batchReduceSize); + Collections.reverse(keysMinDocCount1); + assertOrderByKeyResponse(keysMinDocCount1, data, false, 1, batchReduceSize); + // with min_doc_count = 2 + keysMinDocCount2.sort(String::compareTo); + assertOrderByKeyResponse(keysMinDocCount2, data, true, 2, batchReduceSize); + Collections.reverse(keysMinDocCount2); + assertOrderByKeyResponse(keysMinDocCount2, data, false, 2, batchReduceSize); + } + for (int i = 0; i < 5; i++) { + assertAcked(indicesAdmin().prepareDelete("idx" + i)); + } + } + + private void assertOrderByKeyResponse( + List keys, + Map counts, + boolean asc, + int minDocCount, + int batchReduceSize + ) { + int size = randomIntBetween(1, keys.size()); + long sumOtherCount = 0; + for (int i = size; i < keys.size(); i++) { + sumOtherCount += counts.get(keys.get(i))[0]; + } + final long finalSumOtherCount = sumOtherCount; + assertNoFailuresAndResponse( + prepareSearch("idx0", "idx1", "idx2", "idx3", "idx4").setBatchedReduceSize(batchReduceSize) + .setQuery(QueryBuilders.termQuery("filter", false)) + .addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .size(size) + .shardSize(500) + .minDocCount(minDocCount) + .order(BucketOrder.key(asc)) + ), + response -> { + StringTerms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(size)); + assertThat(terms.getSumOfOtherDocCounts(), equalTo(finalSumOtherCount)); + + for (int i = 0; i < size; i++) { + StringTerms.Bucket bucket = terms.getBuckets().get(i); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo(keys.get(i))); + assertThat(bucket.getDocCount(), equalTo(counts.get(keys.get(i))[0])); + } + } + ); + } } diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index 282e29866a699..64f04d46a9a90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; @@ -78,7 +79,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.IntStream; import static java.util.Collections.singletonList; @@ -809,8 +809,23 @@ public Map getMetadataMappers() { } @Override - public Function> getFieldFilter() { - return index -> field -> field.equals("playlist") == false; + public Function getFieldFilter() { + return index -> new FieldPredicate() { + @Override + public boolean test(String field) { + return field.equals("playlist") == false; + } + + @Override + public String modifyHash(String hash) { + return "not-playlist:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index c9c648e57169a..fa6126c13c741 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -28,12 +28,18 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.function.Function; import java.util.regex.Pattern; +import static java.util.function.Function.identity; +import static org.elasticsearch.common.util.Maps.toUnmodifiableSortedMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesRegex; @@ -143,34 +149,40 @@ public void testThreadPoolMetrics() throws Exception { assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); } final var tp = internalCluster().getInstance(ThreadPool.class, dataNodeName); + // wait for all threads to complete so that we get deterministic results + waitUntil(() -> tp.stats().stats().stream().allMatch(s -> s.active() == 0)); ThreadPoolStats tps = tp.stats(); plugin.collect(); ArrayList registeredMetrics = plugin.getRegisteredMetrics(InstrumentType.LONG_GAUGE); registeredMetrics.addAll(plugin.getRegisteredMetrics(InstrumentType.LONG_ASYNC_COUNTER)); + tps.forEach(stats -> { - Map threadPoolMetrics = Map.of( - ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, - stats.completed(), - 
ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, - (long) stats.active(), - ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, - (long) stats.threads(), - ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, - (long) stats.largest(), - ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, - (long) stats.queue() - ); - threadPoolMetrics.forEach((suffix, value) -> { - String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + stats.name() + suffix; - List measurements; - if (suffix.equals(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED)) { - measurements = plugin.getLongAsyncCounterMeasurement(metricName); - } else { - measurements = plugin.getLongGaugeMeasurement(metricName); - } + Map threadPoolStats = List.of( + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, stats.completed()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, (long) stats.active()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, (long) stats.threads()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, (long) stats.largest()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, (long) stats.queue()) + ).stream().collect(toUnmodifiableSortedMap(e -> stats.name() + e.getKey(), Entry::getValue)); + + Function> measurementExtractor = name -> { + String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + name; assertThat(metricName, in(registeredMetrics)); - assertThat(measurements.get(0).getLong(), greaterThanOrEqualTo(value)); - }); + + List measurements = name.endsWith(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED) + ? plugin.getLongAsyncCounterMeasurement(metricName) + : plugin.getLongGaugeMeasurement(metricName); + return measurements.stream().map(Measurement::getLong).toList(); + }; + + Map> measurements = threadPoolStats.keySet() + .stream() + .collect(toUnmodifiableSortedMap(identity(), measurementExtractor)); + + logger.info("Stats of `{}`: {}", stats.name(), threadPoolStats); + logger.info("Measurements of `{}`: {}", stats.name(), measurements); + + threadPoolStats.forEach((metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(equalTo(value))))); }); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 22e02652e9f68..48970b97e480e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -101,6 +101,30 @@ public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } + /** + * Returns {@code true} if this version is a patch version at or after {@code version}. + *
<p>
+ * This should not be used normally. It is used for matching patch versions of the same base version, + * using the standard version number format specified in {@link TransportVersions}. + * When a patch version of an existing transport version is created, {@code transportVersion.isPatchFrom(patchVersion)} + * will match any transport version at or above {@code patchVersion} that is also of the same base version.
+ * <p>
+ * For example, {@code version.isPatchFrom(8_800_00_4)} will return the following for the given {@code version}:
+ * <ul>
+ *     <li>{@code 8_799_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ *     <li>{@code 8_799_00_9.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ *     <li>{@code 8_800_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ *     <li>{@code 8_800_00_3.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ *     <li>{@code 8_800_00_4.isPatchFrom(8_800_00_4)}: {@code true}</li>
+ *     <li>{@code 8_800_00_9.isPatchFrom(8_800_00_4)}: {@code true}</li>
+ *     <li>{@code 8_800_01_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ *     <li>{@code 8_801_00_0.isPatchFrom(8_800_00_4)}: {@code false}</li>
+ * </ul>
+ */ + public boolean isPatchFrom(TransportVersion version) { + return onOrAfter(version) && id < version.id + 10 - (version.id % 10); + } + /** * Returns a string representing the Elasticsearch release version of this transport version, * if applicable for this deployment, otherwise the raw version number. diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5f904c872045a..3f0622a9c855c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -150,7 +150,11 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_SERIALIZE_BIG_ARRAY = def(8_610_00_0); public static final TransportVersion AUTO_SHARDING_ROLLOVER_CONDITION = def(8_611_00_0); public static final TransportVersion KNN_QUERY_VECTOR_BUILDER = def(8_612_00_0); - public static final TransportVersion SEMANTIC_TEXT_FIELD_ADDED = def(8_613_00_0); + public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0); + public static final TransportVersion ML_COMPLETION_INFERENCE_SERVICE_ADDED = def(8_614_00_0); + public static final TransportVersion ML_INFERENCE_EMBEDDING_BYTE_ADDED = def(8_615_00_0); + public static final TransportVersion ML_INFERENCE_L2_NORM_SIMILARITY_ADDED = def(8_616_00_0); + public static final TransportVersion SEMANTIC_TEXT_FIELD_ADDED = def(8_617_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 241af6e7b6c45..3a4958e046a82 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -119,6 +119,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_17 = new Version(7_17_17_99); public static final Version V_7_17_18 = new Version(7_17_18_99); public static final Version V_7_17_19 = new Version(7_17_19_99); + public static final Version V_7_17_20 = new Version(7_17_20_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); @@ -166,8 +167,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_12_0 = new Version(8_12_00_99); public static final Version V_8_12_1 = new Version(8_12_01_99); public static final Version V_8_12_2 = new Version(8_12_02_99); - public static final Version V_8_12_3 = new Version(8_12_03_99); public static final Version V_8_13_0 = new Version(8_13_00_99); + public static final Version V_8_13_1 = new Version(8_13_01_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 230a8154b64ce..685fc032431c3 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -12,13 +12,11 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.WriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -26,7 +24,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.net.URLEncoder; @@ -34,7 +31,6 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -43,14 +39,14 @@ */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, ToXContentObject { - private static final String _SHARDS = "_shards"; - private static final String _INDEX = "_index"; - private static final String _ID = "_id"; - private static final String _VERSION = "_version"; - private static final String _SEQ_NO = "_seq_no"; - private static final String _PRIMARY_TERM = "_primary_term"; - private static final String RESULT = "result"; - private static final String FORCED_REFRESH = "forced_refresh"; + public static final String _SHARDS = "_shards"; + public static final String _INDEX = "_index"; + public static final String _ID = "_id"; + public static final String _VERSION = "_version"; + public static final String _SEQ_NO = "_seq_no"; + public static final String _PRIMARY_TERM = "_primary_term"; + public static final String RESULT = "result"; + public static final String FORCED_REFRESH = "forced_refresh"; /** * An enum that represents the results of CRUD operations, primarily used to communicate the type of @@ -302,54 +298,6 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t return builder; } - /** - * Parse the output of the {@link #innerToXContent(XContentBuilder, Params)} method. - * - * This method is intended to be called by subclasses and must be called multiple times to parse all the information concerning - * {@link DocWriteResponse} objects. It always parses the current token, updates the given parsing context accordingly - * if needed and then immediately returns. - */ - protected static void parseInnerToXContent(XContentParser parser, Builder context) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - if (token.isValue()) { - if (_INDEX.equals(currentFieldName)) { - // index uuid and shard id are unknown and can't be parsed back for now. 
- context.setShardId(new ShardId(new Index(parser.text(), IndexMetadata.INDEX_UUID_NA_VALUE), -1)); - } else if (_ID.equals(currentFieldName)) { - context.setId(parser.text()); - } else if (_VERSION.equals(currentFieldName)) { - context.setVersion(parser.longValue()); - } else if (RESULT.equals(currentFieldName)) { - String result = parser.text(); - for (Result r : Result.values()) { - if (r.getLowercase().equals(result)) { - context.setResult(r); - break; - } - } - } else if (FORCED_REFRESH.equals(currentFieldName)) { - context.setForcedRefresh(parser.booleanValue()); - } else if (_SEQ_NO.equals(currentFieldName)) { - context.setSeqNo(parser.longValue()); - } else if (_PRIMARY_TERM.equals(currentFieldName)) { - context.setPrimaryTerm(parser.longValue()); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (_SHARDS.equals(currentFieldName)) { - context.setShardInfo(ShardInfo.fromXContent(parser)); - } else { - parser.skipChildren(); // skip potential inner objects for forward compatibility - } - } else if (token == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); // skip potential inner arrays for forward compatibility - } - } - /** * Base class of all {@link DocWriteResponse} builders. These {@link DocWriteResponse.Builder} are used during * xcontent parsing to temporarily store the parsed values, then the {@link Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 7d883ad60b4e7..6d052c242c55c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -23,9 +22,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -35,13 +31,11 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Returns the list of tasks currently running on the nodes */ public class ListTasksResponse extends BaseTasksResponse { - private static final String TASKS = "tasks"; + public static final String TASKS = "tasks"; private final List tasks; @@ -69,35 +63,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(tasks); } - protected static ConstructingObjectParser setupParser( - String name, - TriFunction, List, List, T> ctor - ) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(name, true, constructingObjects -> { - int i = 0; - @SuppressWarnings("unchecked") - List tasks = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List tasksFailures = (List) 
constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodeFailures = (List) constructingObjects[i]; - return ctor.apply(tasks, tasksFailures, nodeFailures); - }); - parser.declareObjectArray(optionalConstructorArg(), TaskInfo.PARSER, new ParseField(TASKS)); - parser.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), new ParseField(TASK_FAILURES)); - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - new ParseField(NODE_FAILURES) - ); - return parser; - } - - private static final ConstructingObjectParser PARSER = setupParser( - "list_tasks_response", - ListTasksResponse::new - ); - /** * Returns the list of tasks by node */ @@ -250,10 +215,6 @@ public ChunkedToXContentObject groupedByNone() { })); } - public static ListTasksResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return Strings.toString(ChunkedToXContent.wrapAsToXContent(groupedByNone()), true, true); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java index 354c67cfb416b..8d48141f9e268 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java @@ -14,11 +14,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; @@ -34,12 +31,6 @@ public class VerifyRepositoryResponse extends ActionResponse implements ToXConte static final String NAME = "name"; public static class NodeView implements Writeable, ToXContentObject { - private static final ObjectParser.NamedObjectParser PARSER; - static { - ObjectParser internalParser = new ObjectParser<>(NODES, true, null); - internalParser.declareString(NodeView::setName, new ParseField(NAME)); - PARSER = (p, v, name) -> internalParser.parse(p, new NodeView(name), null); - } final String nodeId; String name; @@ -104,15 +95,6 @@ public int hashCode() { private List nodes; - private static final ObjectParser PARSER = new ObjectParser<>( - VerifyRepositoryResponse.class.getName(), - true, - VerifyRepositoryResponse::new - ); - static { - PARSER.declareNamedObjects(VerifyRepositoryResponse::setNodes, NodeView.PARSER, new ParseField("nodes")); - } - public VerifyRepositoryResponse() {} public VerifyRepositoryResponse(StreamInput in) throws IOException { @@ -157,10 +139,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static VerifyRepositoryResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java index a4ec5222e2392..d99cc0b0ef8df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java @@ -12,36 +12,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * A response for a cluster update settings action. */ public class ClusterUpdateSettingsResponse extends AcknowledgedResponse { - private static final ParseField PERSISTENT = new ParseField("persistent"); - private static final ParseField TRANSIENT = new ParseField("transient"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_update_settings_response", - true, - args -> { - return new ClusterUpdateSettingsResponse((boolean) args[0], (Settings) args[1], (Settings) args[2]); - } - ); - static { - declareAcknowledgedField(PARSER); - PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), TRANSIENT); - PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), PERSISTENT); - } + static final ParseField PERSISTENT = new ParseField("persistent"); + static final ParseField TRANSIENT = new ParseField("transient"); final Settings transientSettings; final Settings persistentSettings; @@ -83,10 +66,6 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IO builder.endObject(); } - public static ClusterUpdateSettingsResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (super.equals(o)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index 7062efd301991..4e04506d03d6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -14,12 +14,8 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotInfo; -import org.elasticsearch.snapshots.SnapshotInfo.SnapshotInfoBuilder; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -29,24 +25,8 @@ */ public class CreateSnapshotResponse extends ActionResponse implements ToXContentObject { - private static final ObjectParser PARSER = new ObjectParser<>( - CreateSnapshotResponse.class.getName(), - true, - CreateSnapshotResponse::new - ); - - static { - PARSER.declareObject( - 
CreateSnapshotResponse::setSnapshotInfoFromBuilder, - SnapshotInfo.SNAPSHOT_INFO_PARSER, - new ParseField("snapshot") - ); - } - @Nullable - private SnapshotInfo snapshotInfo; - - CreateSnapshotResponse() {} + private final SnapshotInfo snapshotInfo; public CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { this.snapshotInfo = snapshotInfo; @@ -57,10 +37,6 @@ public CreateSnapshotResponse(StreamInput in) throws IOException { snapshotInfo = in.readOptionalWriteable(SnapshotInfo::readFrom); } - private void setSnapshotInfoFromBuilder(SnapshotInfoBuilder snapshotInfoBuilder) { - this.snapshotInfo = snapshotInfoBuilder.build(); - } - /** * Returns snapshot information if snapshot was completed by the time this method returned or null otherwise. * @@ -103,10 +79,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static CreateSnapshotResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return "CreateSnapshotResponse{" + "snapshotInfo=" + snapshotInfo + '}'; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 8d776b7ae6ecb..02592b722c9e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -68,7 +69,7 @@ protected void masterOperation( if (request.waitForCompletion()) { snapshotsService.executeSnapshot(request, listener.map(CreateSnapshotResponse::new)); } else { - snapshotsService.createSnapshot(request, listener.map(snapshot -> new CreateSnapshotResponse())); + snapshotsService.createSnapshot(request, listener.map(snapshot -> new CreateSnapshotResponse((SnapshotInfo) null))); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java new file mode 100644 index 0000000000000..44c75e95cbd6d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.get; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.repositories.ResolvedRepositories; +import org.elasticsearch.snapshots.SnapshotMissingException; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +/** + * Represents a filter on snapshots by name, including some special values such as {@code _all} and {@code _current}, as supported by + * {@link TransportGetSnapshotsAction}. + */ +public interface SnapshotNamePredicate { + /** + * @return Whether a snapshot with the given name should be selected. + */ + boolean test(String snapshotName, boolean isCurrentSnapshot); + + /** + * @return the snapshot names that must be present in a repository. If one of these snapshots is missing then this repository should + * yield a {@link SnapshotMissingException} rather than any snapshots. + */ + Collection requiredNames(); + + /** + * A {@link SnapshotNamePredicate} which matches all snapshots (and requires no specific names). + */ + SnapshotNamePredicate MATCH_ALL = new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return true; + } + + @Override + public Collection requiredNames() { + return Collections.emptyList(); + } + }; + + /** + * A {@link SnapshotNamePredicate} which matches all currently-executing snapshots (and requires no specific names). + */ + SnapshotNamePredicate MATCH_CURRENT_ONLY = new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return isCurrentSnapshot; + } + + @Override + public Collection requiredNames() { + return Collections.emptyList(); + } + }; + + /** + * @return a {@link SnapshotNamePredicate} from the given {@link GetSnapshotsRequest} parameters. + */ + static SnapshotNamePredicate forSnapshots(boolean ignoreUnavailable, String[] snapshots) { + if (ResolvedRepositories.isMatchAll(snapshots)) { + return MATCH_ALL; + } + + if (snapshots.length == 1 && GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshots[0])) { + return MATCH_CURRENT_ONLY; + } + + final List includesBuilder = new ArrayList<>(snapshots.length); + final List excludesBuilder = new ArrayList<>(snapshots.length); + final Set requiredNamesBuilder = ignoreUnavailable ? null : Sets.newHashSetWithExpectedSize(snapshots.length); + boolean seenCurrent = false; + boolean seenWildcard = false; + for (final var snapshot : snapshots) { + if (seenWildcard && snapshot.length() > 1 && snapshot.startsWith("-")) { + excludesBuilder.add(snapshot.substring(1)); + } else { + if (Regex.isSimpleMatchPattern(snapshot)) { + seenWildcard = true; + includesBuilder.add(snapshot); + } else if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshot)) { + seenCurrent = true; + seenWildcard = true; + } else { + if (ignoreUnavailable == false) { + requiredNamesBuilder.add(snapshot); + } + includesBuilder.add(snapshot); + } + } + } + + final boolean includeCurrent = seenCurrent; + final String[] includes = includesBuilder.toArray(Strings.EMPTY_ARRAY); + final String[] excludes = excludesBuilder.toArray(Strings.EMPTY_ARRAY); + final Set requiredNames = requiredNamesBuilder == null ? 
Set.of() : Set.copyOf(requiredNamesBuilder); + + return new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return ((includeCurrent && isCurrentSnapshot) || Regex.simpleMatch(includes, snapshotName)) + && (Regex.simpleMatch(excludes, snapshotName) == false); + } + + @Override + public Collection requiredNames() { + return requiredNames; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 3563192be2eb4..6d29c36bdcda1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -10,8 +10,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SnapshotsInProgress; @@ -27,7 +29,6 @@ import org.elasticsearch.common.util.concurrent.AbstractThrottledTaskRunner; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThrottledIterator; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Predicates; @@ -153,8 +154,7 @@ private class GetSnapshotsOperation { private final boolean isMultiRepoRequest; // snapshots selection - private final String[] snapshots; - private final boolean ignoreUnavailable; + private final SnapshotNamePredicate snapshotNamePredicate; private final SnapshotPredicates fromSortValuePredicates; private final Predicate slmPolicyPredicate; @@ -164,14 +164,14 @@ private class GetSnapshotsOperation { @Nullable private final String fromSortValue; private final int offset; - @Nullable - private final SnapshotSortKey.After after; + private final Predicate afterPredicate; private final int size; // current state private final SnapshotsInProgress snapshotsInProgress; // output detail + private final boolean ignoreUnavailable; private final boolean verbose; private final boolean indices; @@ -204,20 +204,20 @@ private class GetSnapshotsOperation { this.cancellableTask = cancellableTask; this.repositories = resolvedRepositories.repositoryMetadata(); this.isMultiRepoRequest = isMultiRepoRequest; - this.snapshots = snapshots; this.ignoreUnavailable = ignoreUnavailable; this.sortBy = sortBy; this.order = order; this.fromSortValue = fromSortValue; this.offset = offset; - this.after = after; this.size = size; this.snapshotsInProgress = snapshotsInProgress; this.verbose = verbose; this.indices = indices; + this.snapshotNamePredicate = SnapshotNamePredicate.forSnapshots(ignoreUnavailable, snapshots); this.fromSortValuePredicates = SnapshotPredicates.forFromSortValue(fromSortValue, sortBy, order); this.slmPolicyPredicate = SlmPolicyPredicate.forPolicies(policies); + this.afterPredicate = sortBy.getAfterPredicate(after, order); 
this.getSnapshotInfoExecutor = new GetSnapshotInfoExecutor( threadPool.info(ThreadPool.Names.SNAPSHOT_META).getMax(), @@ -230,46 +230,71 @@ private class GetSnapshotsOperation { } void getMultipleReposSnapshotInfo(ActionListener listener) { - try (var listeners = new RefCountingListener(listener.map(ignored -> { - cancellableTask.ensureNotCancelled(); - final var sortedSnapshotsInRepos = sortSnapshots( - allSnapshotInfos.stream().flatMap(Collection::stream), - totalCount.get(), - offset, - size - ); - final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); - assert indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); - final int finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); - return new GetSnapshotsResponse( - snapshotInfos, - failuresByRepository, - finalRemaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, - totalCount.get(), - finalRemaining - ); - }))) { - for (final RepositoryMetadata repository : repositories) { - final String repoName = repository.name(); - if (skipRepository(repoName)) { - // TODO we should still count the matching snapshots in totalCount - continue; - } + SubscribableListener + + .newForked(repositoriesDoneListener -> { + try (var listeners = new RefCountingListener(repositoriesDoneListener)) { + for (final RepositoryMetadata repository : repositories) { + final String repoName = repository.name(); + if (skipRepository(repoName)) { + continue; + } - getSingleRepoSnapshotInfo(repoName, listeners.acquire((SnapshotsInRepo snapshotsInRepo) -> { - allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); - remaining.addAndGet(snapshotsInRepo.remaining()); - totalCount.addAndGet(snapshotsInRepo.totalCount()); - }).delegateResponse((l, e) -> { - if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { - failuresByRepository.put(repoName, elasticsearchException); - l.onResponse(SnapshotsInRepo.EMPTY); - } else { - l.onFailure(e); + SubscribableListener + + .newForked(repositoryDataListener -> { + if (snapshotNamePredicate == SnapshotNamePredicate.MATCH_CURRENT_ONLY) { + repositoryDataListener.onResponse(null); + } else { + repositoriesService.repository(repoName).getRepositoryData(executor, repositoryDataListener); + } + }) + + .andThen((l, repositoryData) -> loadSnapshotInfos(repoName, repositoryData, l)) + + .addListener(new DelegatingActionListener<>(listeners.acquire()) { + @Override + public void onResponse(SnapshotsInRepo snapshotsInRepo) { + allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); + remaining.addAndGet(snapshotsInRepo.remaining()); + totalCount.addAndGet(snapshotsInRepo.totalCount()); + delegate.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { + failuresByRepository.put(repoName, elasticsearchException); + delegate.onResponse(null); + } else { + delegate.onFailure(e); + } + } + }); } - })); - } - } + } + }) + + .addListener(listener.map(ignored -> { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); + cancellableTask.ensureNotCancelled(); + final var sortedSnapshotsInRepos = sortSnapshots( + allSnapshotInfos.stream().flatMap(Collection::stream), + totalCount.get(), + offset, + size + ); + final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); + assert indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); + final int 
finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); + return new GetSnapshotsResponse( + snapshotInfos, + failuresByRepository, + finalRemaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, + totalCount.get(), + finalRemaining + ); + })); } private boolean skipRepository(String repositoryName) { @@ -281,114 +306,36 @@ private boolean skipRepository(String repositoryName) { } } - private void getSingleRepoSnapshotInfo(String repo, ActionListener listener) { - final Map allSnapshotIds = new HashMap<>(); - final List currentSnapshots = new ArrayList<>(); - for (final SnapshotInfo snapshotInfo : currentSnapshots(repo)) { - Snapshot snapshot = snapshotInfo.snapshot(); - allSnapshotIds.put(snapshot.getSnapshotId().getName(), snapshot); - currentSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); - } + private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryData, ActionListener listener) { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - final ListenableFuture repositoryDataListener = new ListenableFuture<>(); - if (isCurrentSnapshotsOnly()) { - repositoryDataListener.onResponse(null); - } else { - repositoriesService.getRepositoryData(repo, repositoryDataListener); + if (cancellableTask.notifyIfCancelled(listener)) { + return; } - repositoryDataListener.addListener( - listener.delegateFailureAndWrap( - (l, repositoryData) -> loadSnapshotInfos(repo, allSnapshotIds, currentSnapshots, repositoryData, l) - ) - ); - } - - /** - * Returns a list of currently running snapshots from repository sorted by snapshot creation date - * - * @param repositoryName repository name - * @return list of snapshots - */ - private List currentSnapshots(String repositoryName) { - List snapshotList = new ArrayList<>(); - List entries = SnapshotsService.currentSnapshots( - snapshotsInProgress, - repositoryName, - Collections.emptyList() - ); - for (SnapshotsInProgress.Entry entry : entries) { - snapshotList.add(SnapshotInfo.inProgress(entry)); - } - return snapshotList; - } + final Set unmatchedRequiredNames = new HashSet<>(snapshotNamePredicate.requiredNames()); + final Set toResolve = new HashSet<>(); - private void loadSnapshotInfos( - String repo, - Map allSnapshotIds, - List currentSnapshots, - @Nullable RepositoryData repositoryData, - ActionListener listener - ) { - if (cancellableTask.notifyIfCancelled(listener)) { - return; + for (final var snapshotInProgress : snapshotsInProgress.forRepo(repo)) { + final var snapshotName = snapshotInProgress.snapshot().getSnapshotId().getName(); + unmatchedRequiredNames.remove(snapshotName); + if (snapshotNamePredicate.test(snapshotName, true)) { + toResolve.add(snapshotInProgress.snapshot()); + } } if (repositoryData != null) { - for (SnapshotId snapshotId : repositoryData.getSnapshotIds()) { - if (matchesPredicates(snapshotId, repositoryData)) { - allSnapshotIds.put(snapshotId.getName(), new Snapshot(repo, snapshotId)); + for (final var snapshotId : repositoryData.getSnapshotIds()) { + final var snapshotName = snapshotId.getName(); + unmatchedRequiredNames.remove(snapshotName); + if (snapshotNamePredicate.test(snapshotName, false) && matchesPredicates(snapshotId, repositoryData)) { + toResolve.add(new Snapshot(repo, snapshotId)); } } } - final Set toResolve = new HashSet<>(); - if (ResolvedRepositories.isMatchAll(snapshots)) { - toResolve.addAll(allSnapshotIds.values()); - } else { - final List includePatterns = new ArrayList<>(); - final List excludePatterns = 
new ArrayList<>(); - boolean hasCurrent = false; - boolean seenWildcard = false; - for (String snapshotOrPattern : snapshots) { - if (seenWildcard && snapshotOrPattern.length() > 1 && snapshotOrPattern.startsWith("-")) { - excludePatterns.add(snapshotOrPattern.substring(1)); - } else { - if (Regex.isSimpleMatchPattern(snapshotOrPattern)) { - seenWildcard = true; - includePatterns.add(snapshotOrPattern); - } else if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshotOrPattern)) { - hasCurrent = true; - seenWildcard = true; - } else { - if (ignoreUnavailable == false && allSnapshotIds.containsKey(snapshotOrPattern) == false) { - throw new SnapshotMissingException(repo, snapshotOrPattern); - } - includePatterns.add(snapshotOrPattern); - } - } - } - final String[] includes = includePatterns.toArray(Strings.EMPTY_ARRAY); - final String[] excludes = excludePatterns.toArray(Strings.EMPTY_ARRAY); - for (Map.Entry entry : allSnapshotIds.entrySet()) { - final Snapshot snapshot = entry.getValue(); - if (toResolve.contains(snapshot) == false - && Regex.simpleMatch(includes, entry.getKey()) - && Regex.simpleMatch(excludes, entry.getKey()) == false) { - toResolve.add(snapshot); - } - } - if (hasCurrent) { - for (SnapshotInfo snapshotInfo : currentSnapshots) { - final Snapshot snapshot = snapshotInfo.snapshot(); - if (Regex.simpleMatch(excludes, snapshot.getSnapshotId().getName()) == false) { - toResolve.add(snapshot); - } - } - } - if (toResolve.isEmpty() && ignoreUnavailable == false && isCurrentSnapshotsOnly() == false) { - throw new SnapshotMissingException(repo, snapshots[0]); - } + if (unmatchedRequiredNames.isEmpty() == false) { + throw new SnapshotMissingException(repo, unmatchedRequiredNames.iterator().next()); } if (verbose) { @@ -396,15 +343,15 @@ private void loadSnapshotInfos( } else { assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; - final SnapshotsInRepo snapshotInfos; - if (repositoryData != null) { - // want non-current snapshots as well, which are found in the repository data - snapshotInfos = buildSimpleSnapshotInfos(toResolve, repo, repositoryData, currentSnapshots); - } else { - // only want current snapshots - snapshotInfos = sortSnapshotsWithNoOffsetOrLimit(currentSnapshots.stream().map(SnapshotInfo::basic).toList()); - } - listener.onResponse(snapshotInfos); + + listener.onResponse( + buildSimpleSnapshotInfos( + toResolve, + repo, + repositoryData, + snapshotsInProgress.forRepo(repo).stream().map(entry -> SnapshotInfo.inProgress(entry).basic()).toList() + ) + ); } } @@ -435,60 +382,65 @@ private void snapshots(String repositoryName, Collection snapshotIds } } // then, look in the repository if there's any matching snapshots left - try ( - var listeners = new RefCountingListener( - // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - listener.safeMap(v -> sortSnapshotsWithNoOffsetOrLimit(snapshots)) - ) - ) { - if (snapshotIdsToIterate.isEmpty()) { - return; - } + SubscribableListener - final Repository repository; - try { - repository = repositoriesService.repository(repositoryName); - } catch (RepositoryMissingException e) { - listeners.acquire().onFailure(e); - return; - } + .newForked(l -> { + try (var listeners = new RefCountingListener(l)) { + if (snapshotIdsToIterate.isEmpty()) { + return; + } - // only need to synchronize accesses 
related to reading SnapshotInfo from the repo - final List syncSnapshots = Collections.synchronizedList(snapshots); - - ThrottledIterator.run( - Iterators.failFast(snapshotIdsToIterate.iterator(), () -> cancellableTask.isCancelled() || listeners.isFailing()), - (ref, snapshotId) -> { - final var refListener = ActionListener.runBefore(listeners.acquire(), ref::close); - getSnapshotInfoExecutor.getSnapshotInfo(repository, snapshotId, new ActionListener<>() { - @Override - public void onResponse(SnapshotInfo snapshotInfo) { - if (matchesPredicates(snapshotInfo)) { - syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); - } - refListener.onResponse(null); - } + final Repository repository; + try { + repository = repositoriesService.repository(repositoryName); + } catch (RepositoryMissingException e) { + listeners.acquire().onFailure(e); + return; + } - @Override - public void onFailure(Exception e) { - if (ignoreUnavailable) { - logger.warn(Strings.format("failed to fetch snapshot info for [%s:%s]", repository, snapshotId), e); - refListener.onResponse(null); - } else { - refListener.onFailure(e); - } - } - }); - }, - getSnapshotInfoExecutor.getMaxRunningTasks(), - () -> {}, - () -> {} - ); - } - } + // only need to synchronize accesses related to reading SnapshotInfo from the repo + final List syncSnapshots = Collections.synchronizedList(snapshots); + + ThrottledIterator.run( + Iterators.failFast( + snapshotIdsToIterate.iterator(), + () -> cancellableTask.isCancelled() || listeners.isFailing() + ), + (ref, snapshotId) -> { + final var refListener = ActionListener.runBefore(listeners.acquire(), ref::close); + getSnapshotInfoExecutor.getSnapshotInfo(repository, snapshotId, new ActionListener<>() { + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + if (matchesPredicates(snapshotInfo)) { + syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } + refListener.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + if (ignoreUnavailable) { + logger.warn( + Strings.format("failed to fetch snapshot info for [%s:%s]", repository, snapshotId), + e + ); + refListener.onResponse(null); + } else { + refListener.onFailure(e); + } + } + }); + }, + getSnapshotInfoExecutor.getMaxRunningTasks(), + () -> {}, + () -> {} + ); + } + }) - private boolean isCurrentSnapshotsOnly() { - return snapshots.length == 1 && GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshots[0]); + .addListener(listener.safeMap(v -> + // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here + applyAfterPredicate(snapshots)), executor, threadPool.getThreadContext()); } private SnapshotsInRepo buildSimpleSnapshotInfos( @@ -497,10 +449,16 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( final RepositoryData repositoryData, final List currentSnapshots ) { + if (repositoryData == null) { + // only want current snapshots + return applyAfterPredicate(currentSnapshots); + } // else want non-current snapshots as well, which are found in the repository data + List snapshotInfos = new ArrayList<>(); for (SnapshotInfo snapshotInfo : currentSnapshots) { + assert snapshotInfo.startTime() == 0L && snapshotInfo.endTime() == 0L && snapshotInfo.totalShards() == 0L : snapshotInfo; if (toResolve.remove(snapshotInfo.snapshot())) { - snapshotInfos.add(snapshotInfo.basic()); + snapshotInfos.add(snapshotInfo); } } Map> snapshotsToIndices = new HashMap<>(); @@ -524,15 +482,16 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( ) ); 
} - return sortSnapshotsWithNoOffsetOrLimit(snapshotInfos); + return applyAfterPredicate(snapshotInfos); } - private SnapshotsInRepo sortSnapshotsWithNoOffsetOrLimit(List snapshotInfos) { - return sortSnapshots(snapshotInfos.stream(), snapshotInfos.size(), 0, GetSnapshotsRequest.NO_LIMIT); + private SnapshotsInRepo applyAfterPredicate(List snapshotInfos) { + return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size(), 0); } private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { - final var resultsStream = snapshotInfoStream.filter(sortBy.getAfterPredicate(after, order)) + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); + final var resultsStream = snapshotInfoStream.peek(this::assertSatisfiesAllPredicates) .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); if (size == GetSnapshotsRequest.NO_LIMIT) { @@ -553,6 +512,12 @@ private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, i } } + private void assertSatisfiesAllPredicates(SnapshotInfo snapshotInfo) { + assert matchesPredicates(snapshotInfo); + assert afterPredicate.test(snapshotInfo); + assert indices || snapshotInfo.indices().isEmpty(); + } + private boolean matchesPredicates(SnapshotId snapshotId, RepositoryData repositoryData) { if (fromSortValuePredicates.test(snapshotId, repositoryData) == false) { return false; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java index 5d66baf0216ad..4a98ff62f6293 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java @@ -8,26 +8,17 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * Represents snapshot status of all shards in the index @@ -118,45 +109,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - static final ObjectParser.NamedObjectParser PARSER; - static { - ConstructingObjectParser innerParser = new ConstructingObjectParser<>( - "snapshot_index_status", - true, - (Object[] parsedObjects, String index) -> { - int i = 0; - SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); - SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); - @SuppressWarnings("unchecked") - List shardStatuses = (List) parsedObjects[i]; - - final Map indexShards; - if (shardStatuses == null || shardStatuses.isEmpty()) { - indexShards = emptyMap(); - } else { - indexShards = 
Maps.newMapWithExpectedSize(shardStatuses.size()); - for (SnapshotIndexShardStatus shardStatus : shardStatuses) { - indexShards.put(shardStatus.getShardId().getId(), shardStatus); - } - } - return new SnapshotIndexStatus(index, indexShards, shardsStats, stats); - } - ); - innerParser.declareObject( - constructorArg(), - (p, c) -> SnapshotShardsStats.PARSER.apply(p, null), - new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS) - ); - innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS)); - innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS)); - PARSER = ((p, c, name) -> innerParser.apply(p, name)); - } - - public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - return PARSER.parse(parser, null, parser.currentName()); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java index 5bbc5368505db..28806b0aca87e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java @@ -8,18 +8,13 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Status of a snapshot shards */ @@ -129,33 +124,6 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - Fields.SHARDS_STATS, - true, - (Object[] parsedObjects) -> { - int i = 0; - int initializingShards = (int) parsedObjects[i++]; - int startedShards = (int) parsedObjects[i++]; - int finalizingShards = (int) parsedObjects[i++]; - int doneShards = (int) parsedObjects[i++]; - int failedShards = (int) parsedObjects[i++]; - int totalShards = (int) parsedObjects[i]; - return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); - } - ); - static { - PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL)); - } - - public static SnapshotShardsStats fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index 956ce57d168e0..e228ad18641fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress.State; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; @@ -19,12 +18,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.snapshots.Snapshot; -import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -34,11 +28,7 @@ import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Status of a snapshot @@ -87,7 +77,7 @@ public class SnapshotStatus implements ChunkedToXContentObject, Writeable { updateShardStats(startTime, time); } - private SnapshotStatus( + SnapshotStatus( Snapshot snapshot, State state, List shards, @@ -182,12 +172,12 @@ public SnapshotStats getStats() { return stats; } - private static final String SNAPSHOT = "snapshot"; - private static final String REPOSITORY = "repository"; - private static final String UUID = "uuid"; - private static final String STATE = "state"; - private static final String INDICES = "indices"; - private static final String INCLUDE_GLOBAL_STATE = "include_global_state"; + static final String SNAPSHOT = "snapshot"; + static final String REPOSITORY = "repository"; + static final String UUID = "uuid"; + static final String STATE = "state"; + static final String INDICES = "indices"; + static final String INCLUDE_GLOBAL_STATE = "include_global_state"; @Override public Iterator toXContentChunked(ToXContent.Params params) { @@ -206,59 +196,6 @@ public Iterator toXContentChunked(ToXContent.Params params }), getIndices().values().iterator(), Iterators.single((b, p) -> b.endObject().endObject())); } - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshot_status", - true, - (Object[] parsedObjects) -> { - int i = 0; - String name = (String) parsedObjects[i++]; - String repository = (String) parsedObjects[i++]; - String uuid = (String) parsedObjects[i++]; - String rawState = (String) parsedObjects[i++]; - Boolean includeGlobalState = (Boolean) parsedObjects[i++]; - SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); - SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); - @SuppressWarnings("unchecked") - List indices = ((List) parsedObjects[i]); - - Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid)); - 
SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState); - Map indicesStatus; - List shards; - if (indices == null || indices.isEmpty()) { - indicesStatus = emptyMap(); - shards = emptyList(); - } else { - indicesStatus = Maps.newMapWithExpectedSize(indices.size()); - shards = new ArrayList<>(); - for (SnapshotIndexStatus index : indices) { - indicesStatus.put(index.getIndex(), index); - shards.addAll(index.getShards().values()); - } - } - return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState); - } - ); - static { - PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT)); - PARSER.declareString(constructorArg(), new ParseField(REPOSITORY)); - PARSER.declareString(constructorArg(), new ParseField(UUID)); - PARSER.declareString(constructorArg(), new ParseField(STATE)); - PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE)); - PARSER.declareField( - constructorArg(), - SnapshotStats::fromXContent, - new ParseField(SnapshotStats.Fields.STATS), - ObjectParser.ValueType.OBJECT - ); - PARSER.declareObject(constructorArg(), SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); - PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES)); - } - - public static SnapshotStatus fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - private void updateShardStats(long startTime, long time) { stats = new SnapshotStats(startTime, time, 0, 0, 0, 0, 0, 0); shardsStats = new SnapshotShardsStats(shards); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java index 578800edfb691..941f1c8d30b2c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java @@ -13,18 +13,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Snapshot status response */ @@ -55,23 +50,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(snapshots); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshots_status_response", - true, - (Object[] parsedObjects) -> { - @SuppressWarnings("unchecked") - List snapshots = (List) parsedObjects[0]; - return new SnapshotsStatusResponse(snapshots); - } - ); - static { - PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots")); - } - - public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java index e7568a0c66a37..73cfeb48b96bc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java @@ -13,11 +13,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.script.ScriptContextInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; @@ -31,28 +29,9 @@ public class GetScriptContextResponse extends ActionResponse implements ToXContentObject { - private static final ParseField CONTEXTS = new ParseField("contexts"); + static final ParseField CONTEXTS = new ParseField("contexts"); final Map contexts; - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_script_context", - true, - (a) -> { - Map contexts = ((List) a[0]).stream() - .collect(Collectors.toMap(ScriptContextInfo::getName, c -> c)); - return new GetScriptContextResponse(contexts); - } - ); - - static { - PARSER.declareObjectArray( - ConstructingObjectParser.constructorArg(), - (parser, ctx) -> ScriptContextInfo.PARSER.apply(parser, ctx), - CONTEXTS - ); - } - GetScriptContextResponse(StreamInput in) throws IOException { super(in); int size = in.readInt(); @@ -70,7 +49,7 @@ public class GetScriptContextResponse extends ActionResponse implements ToXConte } // Parser constructor - private GetScriptContextResponse(Map contexts) { + GetScriptContextResponse(Map contexts) { this.contexts = Map.copyOf(contexts); } @@ -96,10 +75,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static GetScriptContextResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java index 4f388804f2340..98b11de5bffc9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java @@ -9,19 +9,14 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import 
java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -29,8 +24,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * The response object that will be returned when reloading analyzers */ @@ -38,10 +31,10 @@ public class ReloadAnalyzersResponse extends BroadcastResponse { private final Map reloadDetails; - private static final ParseField RELOAD_DETAILS_FIELD = new ParseField("reload_details"); - private static final ParseField INDEX_FIELD = new ParseField("index"); - private static final ParseField RELOADED_ANALYZERS_FIELD = new ParseField("reloaded_analyzers"); - private static final ParseField RELOADED_NODE_IDS_FIELD = new ParseField("reloaded_node_ids"); + static final ParseField RELOAD_DETAILS_FIELD = new ParseField("reload_details"); + static final ParseField INDEX_FIELD = new ParseField("index"); + static final ParseField RELOADED_ANALYZERS_FIELD = new ParseField("reloaded_analyzers"); + static final ParseField RELOADED_NODE_IDS_FIELD = new ParseField("reloaded_node_ids"); public ReloadAnalyzersResponse(StreamInput in) throws IOException { super(in); @@ -80,48 +73,6 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.endArray(); } - @SuppressWarnings({ "unchecked" }) - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "reload_analyzer", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - List results = (List) arg[1]; - Map reloadedNodeIds = new HashMap<>(); - for (ReloadDetails result : results) { - reloadedNodeIds.put(result.getIndexName(), result); - } - return new ReloadAnalyzersResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()), - reloadedNodeIds - ); - } - ); - - @SuppressWarnings({ "unchecked" }) - private static final ConstructingObjectParser ENTRY_PARSER = new ConstructingObjectParser<>( - "reload_analyzer.entry", - true, - arg -> { - return new ReloadDetails((String) arg[0], new HashSet<>((List) arg[1]), new HashSet<>((List) arg[2])); - } - ); - - static { - declareBroadcastFields(PARSER); - PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, RELOAD_DETAILS_FIELD); - ENTRY_PARSER.declareString(constructorArg(), INDEX_FIELD); - ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_ANALYZERS_FIELD); - ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_NODE_IDS_FIELD); - } - - public static ReloadAnalyzersResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java index 39d16e35e61a2..605aab5ab02d9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java @@ -11,8 +11,6 @@ import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -21,16 +19,6 @@ */ public class OpenIndexResponse extends ShardsAcknowledgedResponse { - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_index", - true, - args -> new OpenIndexResponse((boolean) args[0], (boolean) args[1]) - ); - - static { - declareAcknowledgedAndShardsAcknowledgedFields(PARSER); - } - public OpenIndexResponse(StreamInput in) throws IOException { super(in, true); } @@ -44,8 +32,4 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); writeShardsAcknowledged(out); } - - public static OpenIndexResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index 6645de880ad86..b03353a11793f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -292,12 +292,14 @@ private RolloverResult rolloverDataStream( DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null ? dataStream.getAutoShardingEvent() : switch (autoShardingResult.type()) { - case NO_CHANGE_REQUIRED -> { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() - ); + case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { + if (dataStream.getAutoShardingEvent() != null) { + logger.info( + "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", + dataStreamName, + dataStream.getAutoShardingEvent() + ); + } yield dataStream.getAutoShardingEvent(); } case INCREASE_SHARDS, DECREASE_SHARDS -> { @@ -308,18 +310,6 @@ yield new DataStreamAutoShardingEvent( now.toEpochMilli() ); } - case COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { - // we're in the cooldown period for this particular recommendation so perhaps use a previous autosharding - // recommendation (or the value configured in the backing index template otherwise) - if (dataStream.getAutoShardingEvent() != null) { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() - ); - } - yield dataStream.getAutoShardingEvent(); - } // data sharding might not be available due to the feature not being available/enabled or due to cluster level excludes // being configured. 
the index template will dictate the number of shards as usual case NOT_APPLICABLE -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index c295ccde01623..bd507ee9054f1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -215,6 +215,16 @@ protected void masterOperation( } } + final IndexAbstraction rolloverTargetAbstraction = clusterState.metadata() + .getIndicesLookup() + .get(rolloverRequest.getRolloverTarget()); + if (rolloverTargetAbstraction.getType() == IndexAbstraction.Type.ALIAS && rolloverTargetAbstraction.isDataStreamRelated()) { + listener.onFailure( + new IllegalStateException("Aliases to data streams cannot be rolled over. Please rollover the data stream itself.") + ); + return; + } + IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getRolloverTarget()) .clear() .indicesOptions(IndicesOptions.fromOptions(true, false, true, true)) @@ -240,11 +250,16 @@ protected void masterOperation( final Optional indexStats = Optional.ofNullable(statsResponse) .map(stats -> stats.getIndex(dataStream.getWriteIndex().getName())); - Double writeLoad = indexStats.map(stats -> stats.getTotal().getIndexing()) - .map(indexing -> indexing.getTotal().getWriteLoad()) - .orElse(null); + Double indexWriteLoad = indexStats.map( + stats -> Arrays.stream(stats.getShards()) + .filter(shardStats -> shardStats.getStats().indexing != null) + // only take primaries into account as in stateful the replicas also index data + .filter(shardStats -> shardStats.getShardRouting().primary()) + .map(shardStats -> shardStats.getStats().indexing.getTotal().getWriteLoad()) + .reduce(0.0, Double::sum) + ).orElse(null); - rolloverAutoSharding = dataStreamAutoShardingService.calculate(clusterState, dataStream, writeLoad); + rolloverAutoSharding = dataStreamAutoShardingService.calculate(clusterState, dataStream, indexWriteLoad); logger.debug("auto sharding result for data stream [{}] is [{}]", dataStream.getName(), rolloverAutoSharding); // if auto sharding recommends increasing the number of shards we want to trigger a rollover even if there are no diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 7d2dad80bf35a..626feeed161f4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -117,6 +119,8 @@ public static class Response extends ActionResponse implements ToXContentObject private final Map componentTemplates; 
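The globalRetention plumbing in this and the surrounding template-response hunks follows one wire pattern: the new optional field is written only when the receiving node's transport version knows about it, and read under the same guard. A compressed sketch of that pattern, reusing the DataStreamGlobalRetention and TransportVersions symbols these hunks introduce (illustrative only, not a class from the PR):

import java.io.IOException;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;

class GlobalRetentionWireSketch {
    @Nullable
    private final DataStreamGlobalRetention globalRetention;

    // Older nodes never send the field, so default it to null when the stream predates the version.
    GlobalRetentionWireSketch(StreamInput in) throws IOException {
        globalRetention = in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)
            ? in.readOptionalWriteable(DataStreamGlobalRetention::read)
            : null;
    }

    // The symmetric guard on the write side keeps mixed-version clusters wire compatible.
    void writeTo(StreamOutput out) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) {
            out.writeOptionalWriteable(globalRetention);
        }
    }
}

The same guarded read/write pair appears in each Response class touched here (GetComponentTemplateAction, GetComposableIndexTemplateAction, and SimulateIndexTemplateResponse).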
@Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(StreamInput in) throws IOException { super(in); @@ -126,16 +130,27 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + globalRetention = in.readOptionalWriteable(DataStreamGlobalRetention::read); + } else { + globalRetention = null; + } } public Response(Map componentTemplates) { this.componentTemplates = componentTemplates; this.rolloverConfiguration = null; + this.globalRetention = null; } - public Response(Map componentTemplates, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + Map componentTemplates, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.componentTemplates = componentTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Map getComponentTemplates() { @@ -146,12 +161,19 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(componentTemplates, StreamOutput::writeWriteable); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -160,23 +182,25 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Response that = (Response) o; return Objects.equals(componentTemplates, that.componentTemplates) - && Objects.equals(rolloverConfiguration, that.rolloverConfiguration); + && Objects.equals(rolloverConfiguration, that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(componentTemplates, rolloverConfiguration); + return Objects.hash(componentTemplates, rolloverConfiguration, globalRetention); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(COMPONENT_TEMPLATES.getPreferredName()); for (Map.Entry componentTemplate : this.componentTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), componentTemplate.getKey()); builder.field(COMPONENT_TEMPLATE.getPreferredName()); - componentTemplate.getValue().toXContent(builder, params, rolloverConfiguration); + componentTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index f75443bad2854..515b8f9fd5c1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -118,7 +120,10 @@ public static class Response extends ActionResponse implements ToXContentObject public static final ParseField INDEX_TEMPLATE = new ParseField("index_template"); private final Map indexTemplates; + @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(StreamInput in) throws IOException { super(in); @@ -128,16 +133,27 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + globalRetention = in.readOptionalWriteable(DataStreamGlobalRetention::read); + } else { + globalRetention = null; + } } public Response(Map indexTemplates) { this.indexTemplates = indexTemplates; this.rolloverConfiguration = null; + this.globalRetention = null; } - public Response(Map indexTemplates, RolloverConfiguration rolloverConfiguration) { + public Response( + Map indexTemplates, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.indexTemplates = indexTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Map indexTemplates() { @@ -150,6 +166,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -157,23 +176,26 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetComposableIndexTemplateAction.Response that = (GetComposableIndexTemplateAction.Response) o; - return Objects.equals(indexTemplates, that.indexTemplates) && Objects.equals(rolloverConfiguration, that.rolloverConfiguration); + return Objects.equals(indexTemplates, that.indexTemplates) + && Objects.equals(rolloverConfiguration, that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(indexTemplates, rolloverConfiguration); + return Objects.hash(indexTemplates, rolloverConfiguration, globalRetention); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(INDEX_TEMPLATES.getPreferredName()); for (Map.Entry indexTemplate : this.indexTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), indexTemplate.getKey()); 
builder.field(INDEX_TEMPLATE.getPreferredName()); - indexTemplate.getValue().toXContent(builder, params, rolloverConfiguration); + indexTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java index e76dc0f46eea2..d081570b2a365 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -96,7 +97,8 @@ protected void masterOperation( listener.onResponse( new GetComponentTemplateAction.Response( results, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + DataStreamGlobalRetention.getFromClusterState(state) ) ); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java index c9b2a23c38828..99360d2eb7bf8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -94,7 +95,8 @@ protected void masterOperation( listener.onResponse( new GetComposableIndexTemplateAction.Response( results, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + DataStreamGlobalRetention.getFromClusterState(state) ) ); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 106f1a7e4f393..378df2d7d53e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -11,6 +11,8 @@ import 
org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,27 +39,35 @@ public class SimulateIndexTemplateResponse extends ActionResponse implements ToX @Nullable // the resolved settings, mappings and aliases for the matched templates, if any - private Template resolvedTemplate; + private final Template resolvedTemplate; @Nullable // a map of template names and their index patterns that would overlap when matching the given index name - private Map> overlappingTemplates; + private final Map> overlappingTemplates; @Nullable - private RolloverConfiguration rolloverConfiguration = null; + private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; - public SimulateIndexTemplateResponse(@Nullable Template resolvedTemplate, @Nullable Map> overlappingTemplates) { - this(resolvedTemplate, overlappingTemplates, null); + public SimulateIndexTemplateResponse( + @Nullable Template resolvedTemplate, + @Nullable Map> overlappingTemplates, + DataStreamGlobalRetention globalRetention + ) { + this(resolvedTemplate, overlappingTemplates, null, globalRetention); } public SimulateIndexTemplateResponse( @Nullable Template resolvedTemplate, @Nullable Map> overlappingTemplates, - @Nullable RolloverConfiguration rolloverConfiguration + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention ) { this.resolvedTemplate = resolvedTemplate; this.overlappingTemplates = overlappingTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public SimulateIndexTemplateResponse(StreamInput in) throws IOException { @@ -73,9 +83,12 @@ public SimulateIndexTemplateResponse(StreamInput in) throws IOException { } else { this.overlappingTemplates = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); - } + rolloverConfiguration = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) + ? in.readOptionalWriteable(RolloverConfiguration::new) + : null; + globalRetention = in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null; } @Override @@ -94,14 +107,18 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); if (this.resolvedTemplate != null) { builder.field(TEMPLATE.getPreferredName()); - this.resolvedTemplate.toXContent(builder, params, rolloverConfiguration); + this.resolvedTemplate.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } if (this.overlappingTemplates != null) { builder.startArray(OVERLAPPING.getPreferredName()); @@ -127,12 +144,14 @@ public boolean equals(Object o) { } SimulateIndexTemplateResponse that = (SimulateIndexTemplateResponse) o; return Objects.equals(resolvedTemplate, that.resolvedTemplate) - && Objects.deepEquals(overlappingTemplates, that.overlappingTemplates); + && Objects.deepEquals(overlappingTemplates, that.overlappingTemplates) + && Objects.equals(rolloverConfiguration, that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(resolvedTemplate, overlappingTemplates); + return Objects.hash(resolvedTemplate, overlappingTemplates, rolloverConfiguration, globalRetention); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index bee6ab7f78be0..51e17999da5c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -112,6 +113,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + final DataStreamGlobalRetention globalRetention = DataStreamGlobalRetention.getFromClusterState(state); final ClusterState stateWithTemplate; if (request.getIndexTemplateRequest() != null) { // we'll "locally" add the template defined by the user in the cluster state (as if it existed in the system) @@ -137,7 +139,7 @@ protected void masterOperation( String matchingTemplate = findV2Template(stateWithTemplate.metadata(), request.getIndexName(), false); if (matchingTemplate == null) { - listener.onResponse(new SimulateIndexTemplateResponse(null, null)); + listener.onResponse(new SimulateIndexTemplateResponse(null, null, null)); return; } @@ -165,11 +167,12 @@ protected void masterOperation( 
new SimulateIndexTemplateResponse( template, overlapping, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + globalRetention ) ); } else { - listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping)); + listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping, globalRetention)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java index 1f35d0b8a1268..39cf5f43f39ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; @@ -99,6 +100,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + final DataStreamGlobalRetention globalRetention = DataStreamGlobalRetention.getFromClusterState(state); String uuid = UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); final String temporaryIndexName = "simulate_template_index_" + uuid; final ClusterState stateWithTemplate; @@ -176,11 +178,12 @@ protected void masterOperation( new SimulateIndexTemplateResponse( template, overlapping, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + globalRetention ) ); } else { - listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping)); + listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping, globalRetention)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 6ec0be33e3766..aaa06908f72f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -9,23 +9,14 @@ package org.elasticsearch.action.admin.indices.validate.query; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.List; -import 
static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * The response of the validate action. * @@ -36,38 +27,10 @@ public class ValidateQueryResponse extends BroadcastResponse { public static final String VALID_FIELD = "valid"; public static final String EXPLANATIONS_FIELD = "explanations"; - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "validate_query", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ValidateQueryResponse( - (boolean) arg[1], - (List) arg[2], - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - static { - declareBroadcastFields(PARSER); - PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); - PARSER.declareObjectArray(optionalConstructorArg(), QueryExplanation.PARSER, new ParseField(EXPLANATIONS_FIELD)); - } - private final boolean valid; private final List queryExplanations; - ValidateQueryResponse(StreamInput in) throws IOException { - super(in); - valid = in.readBoolean(); - queryExplanations = in.readCollectionAsList(QueryExplanation::new); - } - ValidateQueryResponse( boolean valid, List queryExplanations, @@ -115,8 +78,4 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.endArray(); } } - - public static ValidateQueryResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 452a9ec90443a..a6439769b51b4 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -16,18 +16,21 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.RoutingMissingException; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; @@ -39,11 +42,16 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; +import 
java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.LongSupplier; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; @@ -59,14 +67,16 @@ final class BulkOperation extends ActionRunnable { private final Task task; private final ThreadPool threadPool; private final ClusterService clusterService; - private BulkRequest bulkRequest; // set to null once all requests are sent out + private BulkRequest bulkRequest; // set to null once all requests are completed private final ActionListener listener; private final AtomicArray responses; + private final ConcurrentLinkedQueue failureStoreRedirects = new ConcurrentLinkedQueue<>(); private final long startTimeNanos; private final ClusterStateObserver observer; private final Map indicesThatCannotBeCreated; private final String executorName; private final LongSupplier relativeTimeProvider; + private final FailureStoreDocumentConverter failureStoreDocumentConverter; private IndexNameExpressionResolver indexNameExpressionResolver; private NodeClient client; @@ -83,6 +93,40 @@ final class BulkOperation extends ActionRunnable { LongSupplier relativeTimeProvider, long startTimeNanos, ActionListener listener + ) { + this( + task, + threadPool, + executorName, + clusterService, + bulkRequest, + client, + responses, + indicesThatCannotBeCreated, + indexNameExpressionResolver, + relativeTimeProvider, + startTimeNanos, + listener, + new ClusterStateObserver(clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext()), + new FailureStoreDocumentConverter() + ); + } + + BulkOperation( + Task task, + ThreadPool threadPool, + String executorName, + ClusterService clusterService, + BulkRequest bulkRequest, + NodeClient client, + AtomicArray responses, + Map indicesThatCannotBeCreated, + IndexNameExpressionResolver indexNameExpressionResolver, + LongSupplier relativeTimeProvider, + long startTimeNanos, + ActionListener listener, + ClusterStateObserver observer, + FailureStoreDocumentConverter failureStoreDocumentConverter ) { super(listener); this.task = task; @@ -97,68 +141,90 @@ final class BulkOperation extends ActionRunnable { this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; - this.observer = new ClusterStateObserver(clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext()); + this.observer = observer; + this.failureStoreDocumentConverter = failureStoreDocumentConverter; } @Override protected void doRun() { assert bulkRequest != null; final ClusterState clusterState = observer.setAndGetObservedState(); - if (handleBlockExceptions(clusterState)) { + if (handleBlockExceptions(clusterState, BulkOperation.this, this::onFailure)) { + return; + } + Map> requestsByShard = groupBulkRequestsByShards(clusterState); + executeBulkRequestsByShard(requestsByShard, clusterState, this::redirectFailuresOrCompleteBulkOperation); + } + + private void doRedirectFailures() { + assert failureStoreRedirects.isEmpty() != true : "Attempting to redirect failures, but none were present in the queue"; + final ClusterState clusterState = observer.setAndGetObservedState(); + // If the cluster is blocked at this point, discard the failure store redirects and complete the response with the original failures + if (handleBlockExceptions(clusterState, ActionRunnable.run(listener, this::doRedirectFailures), this::discardRedirectsAndFinish)) { return; } - Map> requestsByShard = 
groupRequestsByShards(clusterState); - executeBulkRequestsByShard(requestsByShard, clusterState); + Map> requestsByShard = drainAndGroupRedirectsByShards(clusterState); + executeBulkRequestsByShard(requestsByShard, clusterState, this::completeBulkOperation); } private long buildTookInMillis(long startTimeNanos) { return TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeNanos); } - private Map> groupRequestsByShards(ClusterState clusterState) { + private Map> groupBulkRequestsByShards(ClusterState clusterState) { + return groupRequestsByShards( + clusterState, + Iterators.enumerate(bulkRequest.requests.iterator(), BulkItemRequest::new), + BulkOperation::validateWriteIndex + ); + } + + private Map> drainAndGroupRedirectsByShards(ClusterState clusterState) { + return groupRequestsByShards( + clusterState, + Iterators.fromSupplier(failureStoreRedirects::poll), + (ia, ignore) -> validateRedirectIndex(ia) + ); + } + + private Map> groupRequestsByShards( + ClusterState clusterState, + Iterator it, + BiConsumer> indexOperationValidator + ) { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); Metadata metadata = clusterState.metadata(); // Group the requests by ShardId -> Operations mapping Map> requestsByShard = new HashMap<>(); - for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); + while (it.hasNext()) { + BulkItemRequest bulkItemRequest = it.next(); + DocWriteRequest docWriteRequest = bulkItemRequest.request(); + // the request can only be null because we set it to null in the previous step, so it gets ignored if (docWriteRequest == null) { continue; } - if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, i, metadata)) { + if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } - if (addFailureIfIndexCannotBeCreated(docWriteRequest, i)) { + if (addFailureIfIndexCannotBeCreated(docWriteRequest, bulkItemRequest.id())) { continue; } - if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, i, metadata)) { + if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } IndexAbstraction ia = null; - boolean includeDataStreams = docWriteRequest.opType() == DocWriteRequest.OpType.CREATE; try { ia = concreteIndices.resolveIfAbsent(docWriteRequest); - if (ia.isDataStreamRelated() && includeDataStreams == false) { - throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); - } - // The ConcreteIndices#resolveIfAbsent(...) method validates via IndexNameExpressionResolver whether - // an operation is allowed in index into a data stream, but this isn't done when resolve call is cached, so - // the validation needs to be performed here too. 
- if (ia.getParentDataStream() != null && - // avoid valid cases when directly indexing into a backing index - // (for example when directly indexing into .ds-logs-foobar-000001) - ia.getName().equals(docWriteRequest.index()) == false && docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) { - throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); - } + indexOperationValidator.accept(ia, docWriteRequest); TransportBulkAction.prohibitCustomRoutingOnDataStream(docWriteRequest, metadata); TransportBulkAction.prohibitAppendWritesInBackingIndices(docWriteRequest, metadata); docWriteRequest.routing(metadata.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); final Index concreteIndex = docWriteRequest.getConcreteWriteIndex(ia, metadata); - if (addFailureIfIndexIsClosed(docWriteRequest, concreteIndex, i, metadata)) { + if (addFailureIfIndexIsClosed(docWriteRequest, concreteIndex, bulkItemRequest.id(), metadata)) { continue; } IndexRouting indexRouting = concreteIndices.routing(concreteIndex); @@ -168,37 +234,56 @@ private Map> groupRequestsByShards(ClusterState c new ShardId(concreteIndex, shardId), shard -> new ArrayList<>() ); - shardRequests.add(new BulkItemRequest(i, docWriteRequest)); + shardRequests.add(bulkItemRequest); } catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException | ResourceNotFoundException e) { String name = ia != null ? ia.getName() : docWriteRequest.index(); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(name, docWriteRequest.id(), e); - BulkItemResponse bulkItemResponse = BulkItemResponse.failure(i, docWriteRequest.opType(), failure); - responses.set(i, bulkItemResponse); - // make sure the request gets never processed again - bulkRequest.requests.set(i, null); + addFailureAndDiscardRequest(docWriteRequest, bulkItemRequest.id(), name, e); } } return requestsByShard; } - private void executeBulkRequestsByShard(Map> requestsByShard, ClusterState clusterState) { + /** + * Validates that an index abstraction is capable of receiving the provided write request + */ + private static void validateWriteIndex(IndexAbstraction ia, DocWriteRequest docWriteRequest) { + boolean includeDataStreams = docWriteRequest.opType() == DocWriteRequest.OpType.CREATE; + if (ia.isDataStreamRelated() && includeDataStreams == false) { + throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); + } + // The ConcreteIndices#resolveIfAbsent(...) method validates via IndexNameExpressionResolver whether + // an operation is allowed in index into a data stream, but this isn't done when resolve call is cached, so + // the validation needs to be performed here too. + if (ia.getParentDataStream() != null && + // avoid valid cases when directly indexing into a backing index + // (for example when directly indexing into .ds-logs-foobar-000001) + ia.getName().equals(docWriteRequest.index()) == false && docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) { + throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); + } + } + + /** + * Validates that an index abstraction is capable of receiving a failure store redirect + */ + private static void validateRedirectIndex(IndexAbstraction ia) { + if (ia.isDataStreamRelated() == false) { + // We should only be dealing with traffic targeting concrete data streams. 
+ throw new IllegalArgumentException("only write ops to data streams with enabled failure stores can be redirected on failure."); + } + } + + private void executeBulkRequestsByShard( + Map> requestsByShard, + ClusterState clusterState, + Runnable onRequestsCompleted + ) { if (requestsByShard.isEmpty()) { - listener.onResponse( - new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) - ); + onRequestsCompleted.run(); return; } String nodeId = clusterService.localNode().getId(); - Runnable onBulkItemsComplete = () -> { - listener.onResponse( - new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) - ); - // Allow memory for bulk shard request items to be reclaimed before all items have been completed - bulkRequest = null; - }; - - try (RefCountingRunnable bulkItemRequestCompleteRefCount = new RefCountingRunnable(onBulkItemsComplete)) { + try (RefCountingRunnable bulkItemRequestCompleteRefCount = new RefCountingRunnable(onRequestsCompleted)) { for (Map.Entry> entry : requestsByShard.entrySet()) { final ShardId shardId = entry.getKey(); final List requests = entry.getValue(); @@ -223,18 +308,75 @@ private void executeBulkRequestsByShard(Map> requ } } + private void redirectFailuresOrCompleteBulkOperation() { + if (DataStream.isFailureStoreEnabled() && failureStoreRedirects.isEmpty() == false) { + doRedirectFailures(); + } else { + completeBulkOperation(); + } + } + + private void completeBulkOperation() { + listener.onResponse( + new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) + ); + // Allow memory for bulk shard request items to be reclaimed before all items have been completed + bulkRequest = null; + } + + /** + * Discards all failure store redirections and completes the bulk request. + * @param exception any documents that could have been redirected will have this exception added as a suppressed exception + * on their original failure information. 
+ */ + private void discardRedirectsAndFinish(Exception exception) { + assert failureStoreRedirects.isEmpty() != true : "Attempting to discard redirects, but there were none to discard"; + Iterator redirectedBulkItemIterator = Iterators.fromSupplier(failureStoreRedirects::poll); + while (redirectedBulkItemIterator.hasNext()) { + BulkItemRequest cancelledRedirectBulkItem = redirectedBulkItemIterator.next(); + int slot = cancelledRedirectBulkItem.id(); + BulkItemResponse originalFailure = responses.get(slot); + if (originalFailure.isFailed()) { + originalFailure.getFailure().getCause().addSuppressed(exception); + } + } + completeBulkOperation(); + } + private void executeBulkShardRequest(BulkShardRequest bulkShardRequest, Releasable releaseOnFinish) { client.executeLocally(TransportShardBulkAction.TYPE, bulkShardRequest, new ActionListener<>() { + + // Lazily get the cluster state to avoid keeping it around longer than it is needed + private ClusterState clusterState = null; + + private ClusterState getClusterState() { + if (clusterState == null) { + clusterState = clusterService.state(); + } + return clusterState; + } + @Override public void onResponse(BulkShardResponse bulkShardResponse) { - for (BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { - // we may have no response if item failed - if (bulkItemResponse.getResponse() != null) { + for (int idx = 0; idx < bulkShardResponse.getResponses().length; idx++) { + // We zip the requests and responses together so that we can identify failed documents and potentially store them + BulkItemResponse bulkItemResponse = bulkShardResponse.getResponses()[idx]; + + if (bulkItemResponse.isFailed()) { + BulkItemRequest bulkItemRequest = bulkShardRequest.items()[idx]; + assert bulkItemRequest.id() == bulkItemResponse.getItemId() : "Bulk items were returned out of order"; + + String failureStoreReference = getRedirectTarget(bulkItemRequest.request(), getClusterState().metadata()); + if (failureStoreReference != null) { + addDocumentToRedirectRequests(bulkItemRequest, bulkItemResponse.getFailure().getCause(), failureStoreReference); + } + addFailure(bulkItemResponse); + } else { bulkItemResponse.getResponse().setShardInfo(bulkShardResponse.getShardInfo()); + responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } - responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } - releaseOnFinish.close(); + completeShardOperation(); } @Override @@ -243,33 +385,135 @@ public void onFailure(Exception e) { for (BulkItemRequest request : bulkShardRequest.items()) { final String indexName = request.index(); DocWriteRequest docWriteRequest = request.request(); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e); - responses.set(request.id(), BulkItemResponse.failure(request.id(), docWriteRequest.opType(), failure)); + + String failureStoreReference = getRedirectTarget(docWriteRequest, getClusterState().metadata()); + if (failureStoreReference != null) { + addDocumentToRedirectRequests(request, e, failureStoreReference); + } + addFailure(docWriteRequest, request.id(), indexName, e); } + completeShardOperation(); + } + + private void completeShardOperation() { + // Clear our handle on the cluster state to allow it to be cleaned up + clusterState = null; releaseOnFinish.close(); } }); } - private boolean handleBlockExceptions(ClusterState state) { + /** + * Determines if the write request can be redirected if it fails. 
Write requests can be redirected IFF they are targeting a data stream + with a failure store and are not already redirected themselves. If the document can be redirected, the data stream name to use for + the redirection is returned. + * + * @param docWriteRequest the write request to check + * @param metadata cluster state metadata for resolving index abstractions + * @return a data stream name if the write request points to a data stream that has the failure store enabled, + * or {@code null} if it does not + */ + private static String getRedirectTarget(DocWriteRequest docWriteRequest, Metadata metadata) { + // Feature flag guard + if (DataStream.isFailureStoreEnabled() == false) { + return null; + } + // Do not resolve a failure store for documents that were already headed to one + if (docWriteRequest instanceof IndexRequest indexRequest && indexRequest.isWriteToFailureStore()) { + return null; + } + // If there is no index abstraction, then the request is using a pattern of some sort, which data streams do not support + IndexAbstraction ia = metadata.getIndicesLookup().get(docWriteRequest.index()); + if (ia == null) { + return null; + } + if (ia.isDataStreamRelated()) { + // The index abstraction could be an alias. Alias abstractions (even for data streams) only keep track of which _index_ they + // will write to, not which _data stream_. + // We work backward to find the data stream from the concrete write index to cover this case. + Index concreteIndex = ia.getWriteIndex(); + IndexAbstraction writeIndexAbstraction = metadata.getIndicesLookup().get(concreteIndex.getName()); + DataStream parentDataStream = writeIndexAbstraction.getParentDataStream(); + if (parentDataStream != null && parentDataStream.isFailureStore()) { + // Keep the data stream name around to resolve the redirect to failure store if the shard level request fails. + return parentDataStream.getName(); + } + } + return null; + } + + /** + * Marks a failed bulk item for redirection. At the end of the first round of shard requests, any documents in the + * redirect list are processed to their final destinations. + * + * @param request The bulk item request that failed + * @param cause The exception that caused the failure + * @param failureStoreReference The data stream that contains the failure store for this item + */ + private void addDocumentToRedirectRequests(BulkItemRequest request, Exception cause, String failureStoreReference) { + // Convert the document into a failure document + IndexRequest failureStoreRequest; + try { + failureStoreRequest = failureStoreDocumentConverter.transformFailedRequest( + TransportBulkAction.getIndexWriteRequest(request.request()), + cause, + failureStoreReference, + threadPool::absoluteTimeInMillis + ); + } catch (IOException ioException) { + logger.debug( + () -> "Could not transform failed bulk request item into failure store document. Attempted for [" + + request.request().opType() + + ": index=" + + request.index() + + "; id=" + + request.request().id() + + "; bulk_slot=" + + request.id() + + "] Proceeding with failing the original.", + ioException + ); + // Suppress and do not redirect + cause.addSuppressed(ioException); + return; + } + + // Store for second phase + BulkItemRequest redirected = new BulkItemRequest(request.id(), failureStoreRequest); + failureStoreRedirects.add(redirected); + } + + /** + * Examine the cluster state for blocks before continuing. If any block exists in the cluster state, this function will return + * {@code true}.
If the block is retryable, the {@code retryOperation} runnable will be called asynchronously if the cluster ever + * becomes unblocked. If a non retryable block exists, or if we encounter a timeout before the blocks could be cleared, the + * {@code onClusterBlocked} consumer will be invoked with the cluster block exception. + * + * @param state The current state to check for blocks + * @param retryOperation If retryable blocks exist, the runnable to execute after they have cleared. + * @param onClusterBlocked Consumes the block exception if the cluster has a non retryable block or if we encounter a timeout while + * waiting for a block to clear. + * @return {@code true} if the cluster is currently blocked at all, {@code false} if the cluster has no blocks. + */ + private boolean handleBlockExceptions(ClusterState state, Runnable retryOperation, Consumer onClusterBlocked) { ClusterBlockException blockException = state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); if (blockException != null) { if (blockException.retryable()) { logger.trace("cluster is blocked, scheduling a retry", blockException); - retry(blockException); + retry(blockException, retryOperation, onClusterBlocked); } else { - onFailure(blockException); + onClusterBlocked.accept(blockException); } return true; } return false; } - void retry(Exception failure) { + void retry(Exception failure, final Runnable operation, final Consumer onClusterBlocked) { assert failure != null; if (observer.isTimedOut()) { - // we running as a last attempt after a timeout has happened. don't retry - onFailure(failure); + // we are running as a last attempt after a timeout has happened. don't retry + onClusterBlocked.accept(failure); return; } observer.waitForNextChange(new ClusterStateObserver.Listener() { @@ -286,6 +530,8 @@ public void onNewClusterState(ClusterState state) { @Override public void onClusterServiceClose() { + // There is very little we can do about this, and our time in this JVM is likely short. + // Let's just try to get out of here ASAP. 
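
Taken together, getRedirectTarget and addDocumentToRedirectRequests implement a two-phase scheme: during the first round of shard requests every failure is recorded, and items whose target data stream has a failure store are additionally converted into failure documents and queued; once the first round completes, the queued redirects are flushed as a second round of writes. The sketch below shows only that shape, with simplified hypothetical types standing in for BulkItemRequest, IndexRequest and the converter:

import java.util.ArrayDeque;
import java.util.Queue;

class TwoPhaseRedirectSketch {
    // Hypothetical stand-ins for the real bulk item types.
    record FailedItem(int slot, String targetDataStream, Exception cause) {}
    record Redirect(int slot, String failureStoreTarget, String failureDocument) {}

    private final Queue<Redirect> failureStoreRedirects = new ArrayDeque<>();

    // Phase 1: record the failure, and queue a redirect if the target data stream has a failure store.
    void onItemFailed(FailedItem item, boolean failureStoreEnabled) {
        System.out.println("slot " + item.slot() + " failed: " + item.cause().getMessage());
        if (failureStoreEnabled && item.targetDataStream() != null) {
            String doc = "{\"error\":{\"message\":\"" + item.cause().getMessage() + "\"}}";
            failureStoreRedirects.add(new Redirect(item.slot(), item.targetDataStream(), doc));
        }
    }

    // Phase 2: after the first round of shard requests completes, drain and send the redirects.
    void doRedirectFailures() {
        Redirect next;
        while ((next = failureStoreRedirects.poll()) != null) {
            System.out.println("redirecting slot " + next.slot() + " to failure store of " + next.failureStoreTarget());
        }
    }

    public static void main(String[] args) {
        TwoPhaseRedirectSketch sketch = new TwoPhaseRedirectSketch();
        sketch.onItemFailed(new FailedItem(0, "logs-app-default", new IllegalStateException("mapping conflict")), true);
        sketch.onItemFailed(new FailedItem(1, null, new IllegalStateException("not a data stream")), true);
        sketch.doRedirectFailures();
    }
}

In the real change the conversion is delegated to FailureStoreDocumentConverter, and a conversion failure merely suppresses the IOException onto the original cause rather than queueing a redirect.
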
onFailure(new NodeClosedException(clusterService.localNode())); } @@ -301,7 +547,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - threadPool.executor(executorName).submit(BulkOperation.this); + threadPool.executor(executorName).submit(operation); } }); } @@ -312,7 +558,7 @@ private boolean addFailureIfRequiresAliasAndAliasIsMissing(DocWriteRequest re "[" + DocWriteRequest.REQUIRE_ALIAS + "] request flag is [true] and [" + request.index() + "] is not an alias", request.index() ); - addFailure(request, idx, exception); + addFailureAndDiscardRequest(request, idx, request.index(), exception); return true; } return false; @@ -324,7 +570,7 @@ private boolean addFailureIfRequiresDataStreamAndNoParentDataStream(DocWriteRequ "[" + DocWriteRequest.REQUIRE_DATA_STREAM + "] request flag is [true] and [" + request.index() + "] is not a data stream", request.index() ); - addFailure(request, idx, exception); + addFailureAndDiscardRequest(request, idx, request.index(), exception); return true; } return false; @@ -333,7 +579,7 @@ private boolean addFailureIfRequiresDataStreamAndNoParentDataStream(DocWriteRequ private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index concreteIndex, int idx, final Metadata metadata) { IndexMetadata indexMetadata = metadata.getIndexSafe(concreteIndex); if (indexMetadata.getState() == IndexMetadata.State.CLOSE) { - addFailure(request, idx, new IndexClosedException(concreteIndex)); + addFailureAndDiscardRequest(request, idx, request.index(), new IndexClosedException(concreteIndex)); return true; } return false; @@ -342,20 +588,73 @@ private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index conc private boolean addFailureIfIndexCannotBeCreated(DocWriteRequest request, int idx) { IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); if (cannotCreate != null) { - addFailure(request, idx, cannotCreate); + addFailureAndDiscardRequest(request, idx, request.index(), cannotCreate); return true; } return false; } - private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.id(), unavailableException); - BulkItemResponse bulkItemResponse = BulkItemResponse.failure(idx, request.opType(), failure); - responses.set(idx, bulkItemResponse); + /** + * Like {@link BulkOperation#addFailure(DocWriteRequest, int, String, Exception)} but this method will remove the corresponding entry + * from the working bulk request so that it never gets processed again during this operation. + */ + private void addFailureAndDiscardRequest(DocWriteRequest request, int idx, String index, Exception exception) { + addFailure(request, idx, index, exception); // make sure the request gets never processed again bulkRequest.requests.set(idx, null); } + /** + * Checks if a bulk item response exists for this entry. If none exists, a failure response is created and set in the response array. + * If a response exists already, the failure information provided to this call will be added to the existing failure as a suppressed + * exception. 
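
The rule described here is compact but easy to get wrong: create a failure response for the slot if none exists, otherwise attach the new exception to the existing failure with addSuppressed, and always re-set the slot so the mutation is visible to other threads. A minimal standalone sketch of just that merge rule, with plain exceptions standing in for BulkItemResponse objects:

import java.util.concurrent.atomic.AtomicReferenceArray;

class FailureMergeSketch {
    // One slot per bulk item, as in the responses array used by the bulk operation.
    private final AtomicReferenceArray<Exception> responses = new AtomicReferenceArray<>(8);

    void addFailure(int slot, Exception newFailure) {
        Exception existing = responses.get(slot);
        if (existing == null) {
            responses.set(slot, newFailure);     // first failure claims the slot
        } else {
            existing.addSuppressed(newFailure);  // later failures are attached, not dropped
            responses.set(slot, existing);       // re-set for visibility of the mutation
        }
    }

    public static void main(String[] args) {
        FailureMergeSketch sketch = new FailureMergeSketch();
        sketch.addFailure(0, new IllegalStateException("primary failure"));
        sketch.addFailure(0, new IllegalArgumentException("failure during redirect"));
        Exception merged = sketch.responses.get(0);
        System.out.println(merged + " suppressed=" + merged.getSuppressed().length);
    }
}
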
+ * + * @param request The document write request that should be failed + * @param idx The slot of the bulk entry this request corresponds to + * @param index The resource that this entry was being written to when it failed + * @param exception The exception encountered for this entry + * @see BulkOperation#addFailure(BulkItemResponse) BulkOperation.addFailure if you have a bulk item response object already + */ + private void addFailure(DocWriteRequest request, int idx, String index, Exception exception) { + BulkItemResponse bulkItemResponse = responses.get(idx); + if (bulkItemResponse == null) { + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(index, request.id(), exception); + bulkItemResponse = BulkItemResponse.failure(idx, request.opType(), failure); + } else { + // Response already recorded. We should only be here if the existing response is a failure and + // we are encountering a new failure while redirecting. + assert bulkItemResponse.isFailed() : "Attempting to overwrite successful bulk item result with a failure"; + bulkItemResponse.getFailure().getCause().addSuppressed(exception); + } + // Always replace the item in the responses for thread visibility of any mutations + responses.set(idx, bulkItemResponse); + } + + /** + * Checks if a bulk item response exists for this entry. If none exists, the failure is set in the response array. If a response exists + * already, the failure information provided to this call will be added to the existing failure as a suppressed exception. + * + * @param bulkItemResponse the item response to add to the overall result array + * @see BulkOperation#addFailure(DocWriteRequest, int, String, Exception) BulkOperation.addFailure which conditionally creates the + * failure response only when one does not exist already + */ + private void addFailure(BulkItemResponse bulkItemResponse) { + assert bulkItemResponse.isFailed() : "Attempting to add a successful bulk item response via the addFailure method"; + BulkItemResponse existingBulkItemResponse = responses.get(bulkItemResponse.getItemId()); + if (existingBulkItemResponse != null) { + // Response already recorded. We should only be here if the existing response is a failure and + // we are encountering a new failure while redirecting. + assert existingBulkItemResponse.isFailed() : "Attempting to overwrite successful bulk item result with a failure"; + existingBulkItemResponse.getFailure().getCause().addSuppressed(bulkItemResponse.getFailure().getCause()); + bulkItemResponse = existingBulkItemResponse; + } + // Always replace the item in the responses for thread visibility of any mutations + responses.set(bulkItemResponse.getItemId(), bulkItemResponse); + } + + /** + * Resolves and caches index and routing abstractions to more efficiently group write requests into shards. + */ private static class ConcreteIndices { private final ClusterState state; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -367,6 +666,13 @@ private static class ConcreteIndices { this.indexNameExpressionResolver = indexNameExpressionResolver; } + /** + * Resolves the index abstraction that the write request is targeting, potentially obtaining it from a cache. This instance isn't + * fully resolved, meaning that {@link IndexAbstraction#getWriteIndex()} should be invoked in order to get concrete write index. 
+ * + * @param request a write request + * @return the index abstraction that the write request is targeting + */ IndexAbstraction resolveIfAbsent(DocWriteRequest request) { try { IndexAbstraction indexAbstraction = indexAbstractions.get(request.index()); @@ -384,6 +690,12 @@ IndexAbstraction resolveIfAbsent(DocWriteRequest request) { } } + /** + * Determines which routing strategy to use for a document being written to the provided index, potentially obtaining the result + * from a cache. + * @param index the index to determine routing strategy for + * @return an {@link IndexRouting} object to use for assigning a write request to a shard + */ IndexRouting routing(Index index) { IndexRouting routing = routings.get(index); if (routing == null) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java index 5e630bf9cdef5..2112ad48bec62 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java @@ -53,6 +53,7 @@ final class BulkRequestModifier implements Iterator> { final SparseFixedBitSet failedSlots; final List itemResponses; final AtomicIntegerArray originalSlots; + final FailureStoreDocumentConverter failureStoreDocumentConverter; volatile int currentSlot = -1; @@ -61,6 +62,7 @@ final class BulkRequestModifier implements Iterator> { this.failedSlots = new SparseFixedBitSet(bulkRequest.requests().size()); this.itemResponses = new ArrayList<>(bulkRequest.requests().size()); this.originalSlots = new AtomicIntegerArray(bulkRequest.requests().size()); // oversize, but that's ok + this.failureStoreDocumentConverter = new FailureStoreDocumentConverter(); } @Override @@ -243,7 +245,7 @@ public void markItemForFailureStore(int slot, String targetIndexName, Exception ); } else { try { - IndexRequest errorDocument = FailureStoreDocument.transformFailedRequest(indexRequest, e, targetIndexName); + IndexRequest errorDocument = failureStoreDocumentConverter.transformFailedRequest(indexRequest, e, targetIndexName); // This is a fresh index request! We need to do some preprocessing on it. If we do not, when this is returned to // the bulk action, the action will see that it hasn't been processed by ingest yet and attempt to ingest it again. errorDocument.isPipelineResolved(true); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java similarity index 94% rename from server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java rename to server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java index e0d6e8200e86d..ce76f377ac94e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java @@ -22,9 +22,7 @@ /** * Transforms an indexing request using error information into a new index request to be stored in a data stream's failure store. */ -public final class FailureStoreDocument { - - private FailureStoreDocument() {} +public class FailureStoreDocumentConverter { /** * Combines an {@link IndexRequest} that has failed during the bulk process with the error thrown for that request. 
The result is a @@ -35,7 +33,7 @@ private FailureStoreDocument() {} * @return A new {@link IndexRequest} with a failure store compliant structure * @throws IOException If there is a problem when the document's new source is serialized */ - public static IndexRequest transformFailedRequest(IndexRequest source, Exception exception, String targetIndexName) throws IOException { + public IndexRequest transformFailedRequest(IndexRequest source, Exception exception, String targetIndexName) throws IOException { return transformFailedRequest(source, exception, targetIndexName, System::currentTimeMillis); } @@ -49,7 +47,7 @@ public static IndexRequest transformFailedRequest(IndexRequest source, Exception * @return A new {@link IndexRequest} with a failure store compliant structure * @throws IOException If there is a problem when the document's new source is serialized */ - public static IndexRequest transformFailedRequest( + public IndexRequest transformFailedRequest( IndexRequest source, Exception exception, String targetIndexName, diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java similarity index 58% rename from modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java rename to server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java index a05de46d0e12a..e9ff74459c153 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java @@ -6,13 +6,18 @@ * Side Public License, v 1. */ -package org.elasticsearch.datastreams.action; +package org.elasticsearch.action.datastreams; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.index.Index; import java.util.List; +import java.util.SortedMap; +import java.util.stream.Stream; public class DataStreamsActionUtil { @@ -40,4 +45,26 @@ public static IndicesOptions updateIndicesOptions(IndicesOptions indicesOptions) } return indicesOptions; } + + public static Stream resolveConcreteIndexNames( + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterState clusterState, + String[] names, + IndicesOptions indicesOptions + ) { + List abstractionNames = getDataStreamNames(indexNameExpressionResolver, clusterState, names, indicesOptions); + SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); + + return abstractionNames.stream().flatMap(abstractionName -> { + IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName); + assert indexAbstraction != null; + if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { + DataStream dataStream = (DataStream) indexAbstraction; + List indices = dataStream.getIndices(); + return indices.stream().map(Index::getName); + } else { + return Stream.empty(); + } + }); + } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 8c469f7dffc4d..36f2ff4fffa96 100644 --- 
a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -19,6 +19,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAutoShardingEvent; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -276,14 +278,19 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** - * Converts the response to XContent and passes the RolloverConditions, when provided, to the data stream. + * Converts the response to XContent and passes the RolloverConditions and the global retention, when provided, + * to the data stream. */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(DataStream.NAME_FIELD.getPreferredName(), dataStream.getName()); builder.field(DataStream.TIMESTAMP_FIELD_FIELD.getPreferredName()) @@ -339,7 +346,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla } if (dataStream.getLifecycle() != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - dataStream.getLifecycle().toXContent(builder, params, rolloverConfiguration); + dataStream.getLifecycle().toXContent(builder, params, rolloverConfiguration, globalRetention); } if (ilmPolicyName != null) { builder.field(ILM_POLICY_FIELD.getPreferredName(), ilmPolicyName); @@ -483,20 +490,30 @@ public void writeTo(StreamOutput out) throws IOException { private final List dataStreams; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(List dataStreams) { - this(dataStreams, null); + this(dataStreams, null, null); } - public Response(List dataStreams, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List dataStreams, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.dataStreams = dataStreams; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { this( in.readCollectionAsList(DataStreamInfo::new), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null, + in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null ); } @@ -509,20 +526,29 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + @Nullable + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreams); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); for (DataStreamInfo dataStream : dataStreams) { - dataStream.toXContent(builder, params, rolloverConfiguration); + dataStream.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } builder.endArray(); builder.endObject(); @@ -534,12 +560,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Response response = (Response) o; - return dataStreams.equals(response.dataStreams) && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + return dataStreams.equals(response.dataStreams) + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, response.globalRetention); } @Override public int hashCode() { - return Objects.hash(dataStreams, rolloverConfiguration); + return Objects.hash(dataStreams, rolloverConfiguration, globalRetention); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index 06aec69bc97da..a045c73cc83a1 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -29,7 +29,6 @@ import java.util.List; import java.util.Objects; import java.util.OptionalDouble; -import java.util.OptionalLong; import java.util.function.Function; import java.util.function.LongSupplier; @@ -381,27 +380,11 @@ static double getMaxIndexLoadWithinCoolingPeriod( // assume the current write index load is the highest observed and look back to find the actual maximum double maxIndexLoadWithinCoolingPeriod = writeIndexLoad; for (IndexWriteLoad writeLoad : writeLoadsWithinCoolingPeriod) { - // the IndexWriteLoad stores _for each shard_ a shard average write load ( calculated using : shard indexing time / shard - // uptime ) and its corresponding shard uptime - // - // to reconstruct the average _index_ write load we recalculate the shard indexing time by multiplying the shard write load - // to its uptime, and then, having the indexing time and uptime for each shard we calculate the average _index_ write load using - // (indexingTime_shard0 + indexingTime_shard1) / (uptime_shard0 + uptime_shard1) - // as {@link org.elasticsearch.index.shard.IndexingStats#add} does - double totalShardIndexingTime = 0; - long totalShardUptime = 0; 
+ double totalIndexLoad = 0; for (int shardId = 0; shardId < writeLoad.numberOfShards(); shardId++) { final OptionalDouble writeLoadForShard = writeLoad.getWriteLoadForShard(shardId); - final OptionalLong uptimeInMillisForShard = writeLoad.getUptimeInMillisForShard(shardId); - if (writeLoadForShard.isPresent()) { - assert uptimeInMillisForShard.isPresent(); - double shardIndexingTime = writeLoadForShard.getAsDouble() * uptimeInMillisForShard.getAsLong(); - long shardUptimeInMillis = uptimeInMillisForShard.getAsLong(); - totalShardIndexingTime += shardIndexingTime; - totalShardUptime += shardUptimeInMillis; - } + totalIndexLoad += writeLoadForShard.orElse(0); } - double totalIndexLoad = totalShardUptime == 0 ? 0.0 : (totalShardIndexingTime / totalShardUptime); if (totalIndexLoad > maxIndexLoadWithinCoolingPeriod) { maxIndexLoadWithinCoolingPeriod = totalIndexLoad; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java index 640d8a9efe8ac..2b79377fb71e0 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -98,11 +99,15 @@ public ExplainIndexDataStreamLifecycle(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(INDEX_FIELD.getPreferredName(), index); builder.field(MANAGED_BY_LIFECYCLE_FIELD.getPreferredName(), managedByLifecycle); @@ -127,7 +132,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla } if (this.lifecycle != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + Params withEffectiveRetentionParams = new DelegatingMapParams( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + lifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } if (this.error != null) { if (error.firstOccurrenceTimestamp() != -1L && error.recordedTimestamp() != -1L && error.retryCount() != -1) { diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java index 9fd9c5fcd791f..47202998d3193 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java @@ -12,12 +12,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * The response of the delete action. * @@ -64,23 +61,6 @@ public String toString() { return builder.append("]").toString(); } - public static DeleteResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. - */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - DocWriteResponse.parseInnerToXContent(parser, context); - } - /** * Builder class for {@link DeleteResponse}. This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link DocWriteResponse.Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index 58c50df47c3ce..9d8ba5f126fd5 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -17,14 +17,11 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Collection; import java.util.Objects; import static org.elasticsearch.common.lucene.Lucene.readExplanation; @@ -35,14 +32,14 @@ */ public class ExplainResponse extends ActionResponse implements ToXContentObject { - private static final ParseField _INDEX = new ParseField("_index"); - private static final ParseField _ID = new ParseField("_id"); + static final ParseField _INDEX = new ParseField("_index"); + static final ParseField _ID = new ParseField("_id"); private static final ParseField MATCHED = new ParseField("matched"); - private static final ParseField EXPLANATION = new ParseField("explanation"); - private static final ParseField VALUE = new ParseField("value"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField DETAILS = new ParseField("details"); - private static final ParseField GET = new ParseField("get"); + static final ParseField EXPLANATION = new ParseField("explanation"); + static final ParseField VALUE = new ParseField("value"); + static final ParseField DESCRIPTION = new ParseField("description"); + static final ParseField DETAILS = new ParseField("details"); + static final ParseField GET = new ParseField("get"); private final String index; private final String id; @@ -136,43 +133,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - private static 
final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "explain", - true, - (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); - PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); - final ConstructingObjectParser explanationParser = getExplanationsParser(); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, EXPLANATION); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> GetResult.fromXContentEmbedded(p), GET); - } - - @SuppressWarnings("unchecked") - private static ConstructingObjectParser getExplanationsParser() { - final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>( - "explanation", - true, - arg -> { - if ((float) arg[0] > 0) { - return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); - } else { - return Explanation.noMatch((String) arg[1], (Collection) arg[2]); - } - } - ); - explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), VALUE); - explanationParser.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, DETAILS); - return explanationParser; - } - - public static ExplainResponse fromXContent(XContentParser parser, boolean exists) { - return PARSER.apply(parser, exists); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 6028a6e21ecff..51cb05f981177 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; @@ -123,14 +124,15 @@ private FieldCapabilitiesIndexResponse doFetch( final String shardUuid = indexService.getShard(shardId.getId()).getShardUuid(); indexMappingHash = mapping == null ? 
shardUuid : shardUuid + mapping.getSha256(); } + FieldPredicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); if (indexMappingHash != null) { + indexMappingHash = fieldPredicate.modifyHash(indexMappingHash); final Map existing = indexMappingHashToResponses.get(indexMappingHash); if (existing != null) { return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, existing, true); } } task.ensureNotCancelled(); - Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); final Map responseMap = retrieveFieldCaps( searchExecutionContext, fieldNameFilter, @@ -151,7 +153,7 @@ static Map retrieveFieldCaps( Predicate fieldNameFilter, String[] filters, String[] types, - Predicate indexFieldfilter, + FieldPredicate fieldPredicate, IndexShard indexShard, boolean includeEmptyFields ) { @@ -169,7 +171,7 @@ static Map retrieveFieldCaps( } MappedFieldType ft = entry.getValue(); if ((includeEmptyFields || ft.fieldHasValue(fieldInfos)) - && (indexFieldfilter.test(ft.name()) || context.isMetadataField(ft.name())) + && (fieldPredicate.test(ft.name()) || context.isMetadataField(ft.name())) && (filter == null || filter.test(ft))) { IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( field, diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index a8d6220415a43..d142db2d5a1ab 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -329,7 +329,7 @@ public IndexRequest id(String id) { */ @Override public IndexRequest routing(String routing) { - if (routing != null && routing.length() == 0) { + if (routing != null && routing.isEmpty()) { this.routing = null; } else { this.routing = routing; diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java index e3598c33d5951..c547eb7ba8f30 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -17,13 +17,10 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.List; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A response of an index operation, * @@ -134,23 +131,6 @@ public String toString() { return builder.append("]").toString(); } - public static IndexResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. - */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - DocWriteResponse.parseInnerToXContent(parser, context); - } - /** * Builder class for {@link IndexResponse}. 
This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java index 865b7bdf8abfa..bc50a9f8f0c2c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java @@ -18,9 +18,12 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.builder.SubSearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; @@ -31,6 +34,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; @@ -129,7 +133,7 @@ public CanMatchNodeRequest( long nowInMillis, @Nullable String clusterAlias ) { - this.source = searchRequest.source(); + this.source = getCanMatchSource(searchRequest); this.indicesOptions = indicesOptions; this.shards = new ArrayList<>(shards); this.searchType = searchRequest.searchType(); @@ -146,6 +150,36 @@ public CanMatchNodeRequest( indices = shards.stream().map(Shard::getOriginalIndices).flatMap(Arrays::stream).distinct().toArray(String[]::new); } + private static void collectAggregationQueries(Collection aggregations, List aggregationQueries) { + for (AggregationBuilder aggregation : aggregations) { + QueryBuilder aggregationQuery = aggregation.getQuery(); + if (aggregationQuery != null) { + aggregationQueries.add(aggregationQuery); + } + collectAggregationQueries(aggregation.getSubAggregations(), aggregationQueries); + } + } + + private SearchSourceBuilder getCanMatchSource(SearchRequest searchRequest) { + // Aggregations may use a different query than the top-level search query. An example is + // the significant terms aggregation, which also collects data over a background that + // typically much larger than the search query. To accommodate for this, we take the union + // of all queries to determine whether a request can match. 
+ List aggregationQueries = new ArrayList<>(); + if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { + collectAggregationQueries(searchRequest.source().aggregations().getAggregatorFactories(), aggregationQueries); + } + if (aggregationQueries.isEmpty()) { + return searchRequest.source(); + } else { + List subSearches = new ArrayList<>(searchRequest.source().subSearches()); + for (QueryBuilder aggregationQuery : aggregationQueries) { + subSearches.add(new SubSearchSourceBuilder(aggregationQuery)); + } + return searchRequest.source().shallowCopy().subSearches(subSearches); + } + } + public CanMatchNodeRequest(StreamInput in) throws IOException { super(in); source = in.readOptionalWriteable(SearchSourceBuilder::new); diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 65739b01422a5..adebcfb4e12cb 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -159,7 +159,12 @@ private SubscribableListener(Object initialState) { * (or after) the completion of this listener. *

* If the subscribed listener is not completed immediately then it will be completed on the thread, and in the {@link ThreadContext}, of - * the thread which completes this listener. + * the thread which completes this listener. In other words, if you want to ensure that {@code listener} is completed using a particular + * executor, then you must do both of: + *

+ * <ul>
+ * <li>Ensure that this {@link SubscribableListener} is always completed using that executor, and</li>
+ * <li>Invoke {@link #addListener} using that executor.</li>
+ * </ul>
*/ public final void addListener(ActionListener listener) { addListener(listener, EsExecutors.DIRECT_EXECUTOR_SERVICE, null); @@ -179,6 +184,13 @@ public final void addListener(ActionListener listener) { * @param executor If not {@link EsExecutors#DIRECT_EXECUTOR_SERVICE}, and the subscribing listener is not completed immediately, * then it will be completed using the given executor. If the subscribing listener is completed immediately then * this completion happens on the subscribing thread. + *

+ * In other words, if you want to ensure that {@code listener} is completed using a particular executor, then you
+ * must do both of:
+ * <ul>
+ * <li>Pass the desired executor in as {@code executor}, and</li>
+ * <li>Invoke {@link #addListener} using that executor.</li>
+ * </ul>
* @param threadContext If not {@code null}, and the subscribing listener is not completed immediately, then it will be completed in * the given thread context. If {@code null}, and the subscribing listener is not completed immediately, then it * will be completed in the {@link ThreadContext} of the completing thread. If the subscribing listener is @@ -405,7 +417,13 @@ public void complete(ActionListener listener) { *

* The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is * already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this - * listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. + * listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. In other words, if you + * want to ensure that {@code nextStep} is invoked using a particular executor, then you must do + * both of: + *

+ * <ul>
+ * <li>Ensure that this {@link SubscribableListener} is always completed using that executor, and</li>
+ * <li>Invoke {@link #andThen} using that executor.</li>
+ * </ul>
*/ public SubscribableListener andThen(CheckedBiConsumer, T, ? extends Exception> nextStep) { return andThen(EsExecutors.DIRECT_EXECUTOR_SERVICE, null, nextStep); @@ -427,7 +445,12 @@ public SubscribableListener andThen(CheckedBiConsumer, * The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is * already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this * listener is incomplete then {@code nextStep} is invoked using {@code executor}, in a thread context captured when {@link #andThen} - * was called. + * was called. In other words, if you want to ensure that {@code nextStep} is invoked using a particular executor, then you must do + * both of: + *
+ * <ul>
+ * <li>Pass the desired executor in as {@code executor}, and</li>
+ * <li>Invoke {@link #andThen} using that executor.</li>
+ * </ul>
*/ public SubscribableListener andThen( Executor executor, diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java index b69b87190f2a7..3a27d6ac58534 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java @@ -13,15 +13,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.util.List; import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Base class for all broadcast operation based responses. @@ -30,35 +26,11 @@ public class BaseBroadcastResponse extends ActionResponse { public static final DefaultShardOperationFailedException[] EMPTY = new DefaultShardOperationFailedException[0]; - private static final ParseField _SHARDS_FIELD = new ParseField("_shards"); - private static final ParseField TOTAL_FIELD = new ParseField("total"); - private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful"); - private static final ParseField FAILED_FIELD = new ParseField("failed"); - private static final ParseField FAILURES_FIELD = new ParseField("failures"); - private final int totalShards; private final int successfulShards; private final int failedShards; private final DefaultShardOperationFailedException[] shardFailures; - @SuppressWarnings("unchecked") - public static void declareBroadcastFields(ConstructingObjectParser PARSER) { - ConstructingObjectParser shardsParser = new ConstructingObjectParser<>( - "_shards", - true, - arg -> new BaseBroadcastResponse((int) arg[0], (int) arg[1], (int) arg[2], (List) arg[3]) - ); - shardsParser.declareInt(constructorArg(), TOTAL_FIELD); - shardsParser.declareInt(constructorArg(), SUCCESSFUL_FIELD); - shardsParser.declareInt(constructorArg(), FAILED_FIELD); - shardsParser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> DefaultShardOperationFailedException.fromXContent(p), - FAILURES_FIELD - ); - PARSER.declareObject(constructorArg(), shardsParser, _SHARDS_FIELD); - } - public BaseBroadcastResponse(StreamInput in) throws IOException { totalShards = in.readVInt(); successfulShards = in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java index 107bab447e03e..892db8d4a6d04 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java @@ -40,7 +40,7 @@ public class AcknowledgedResponse extends ActionResponse implements IsAcknowledg public static final String ACKNOWLEDGED_KEY = "acknowledged"; private static final ParseField ACKNOWLEDGED = new ParseField(ACKNOWLEDGED_KEY); - protected static void declareAcknowledgedField(ConstructingObjectParser objectParser) { + public static void 
declareAcknowledgedField(ConstructingObjectParser objectParser) { objectParser.declareField( constructorArg(), (parser, context) -> parser.booleanValue(), diff --git a/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java index f897d8644d4bb..a00495605dbb5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java @@ -24,7 +24,7 @@ public class ShardsAcknowledgedResponse extends AcknowledgedResponse { protected static final ParseField SHARDS_ACKNOWLEDGED = new ParseField("shards_acknowledged"); - protected static void declareAcknowledgedAndShardsAcknowledgedFields( + public static void declareAcknowledgedAndShardsAcknowledgedFields( ConstructingObjectParser objectParser ) { declareAcknowledgedField(objectParser); diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index fe6db0c0c4a4b..3e8290ad4fb4a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -32,8 +32,8 @@ * Base class for responses of task-related operations */ public class BaseTasksResponse extends ActionResponse { - protected static final String TASK_FAILURES = "task_failures"; - protected static final String NODE_FAILURES = "node_failures"; + public static final String TASK_FAILURES = "task_failures"; + public static final String NODE_FAILURES = "node_failures"; private List taskFailures; private List nodeFailures; diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index c6454dd6cedd8..8c9ae3f43d5c4 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -15,15 +15,12 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - public class UpdateResponse extends DocWriteResponse { - private static final String GET = "get"; + static final String GET = "get"; private GetResult getResult; @@ -114,32 +111,6 @@ public String toString() { return builder.append("]").toString(); } - public static UpdateResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. 
- */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - XContentParser.Token token = parser.currentToken(); - String currentFieldName = parser.currentName(); - - if (GET.equals(currentFieldName)) { - if (token == XContentParser.Token.START_OBJECT) { - context.setGetResult(GetResult.fromXContentEmbedded(parser)); - } - } else { - DocWriteResponse.parseInnerToXContent(parser, context); - } - } - /** * Builder class for {@link UpdateResponse}. This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link DocWriteResponse.Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index d1092b0390dff..fc7eaa97c677b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -504,6 +504,13 @@ private void closePrevotingRound() { } } + /** + * Updates {@link #maxTermSeen} if greater. + * + * Every time a new term is found, either from another node requesting election, or this node trying to run for election, always update + * the max term number. The max term may not reflect an actual election, but rather an election attempt by some node in the + * cluster. + */ private void updateMaxTermSeen(final long term) { synchronized (mutex) { maxTermSeen = Math.max(maxTermSeen, term); @@ -549,6 +556,13 @@ private void startElection() { } } + /** + * Broadcasts a request to all 'discoveredNodes' in the cluster to elect 'candidateMasterNode' as the new master. + * + * @param candidateMasterNode the node running for election + * @param term the new proposed master term + * @param discoveredNodes all the nodes to which to send the request + */ private void broadcastStartJoinRequest(DiscoveryNode candidateMasterNode, long term, List discoveredNodes) { electionStrategy.onNewElection(candidateMasterNode, term, new ActionListener<>() { @Override @@ -670,6 +684,9 @@ public void onFailure(Exception e) { }); } + /** + * Validates a request to join the new cluster. Runs on the candidate node running for election to master. + */ private void validateJoinRequest(JoinRequest joinRequest, ActionListener validateListener) { // Before letting the node join the cluster, ensure: @@ -753,6 +770,9 @@ private void sendJoinPing(DiscoveryNode discoveryNode, TransportRequestOptions.T ); } + /** + * Processes the request to join the cluster. Received by the node running for election to master. 
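
The new Coordinator javadoc describes a ratchet that is easier to see in isolation: every term observed anywhere, whether from another node's election attempt or this node's own, pushes maxTermSeen upward, and the node only reacts once that high-water mark passes its current term. A toy sketch of the ratchet; this is a hypothetical class, not the real Coordinator, which performs the update under its coordination mutex alongside election and term-bump handling:

class MaxTermTracker {
    private final Object mutex = new Object();
    private long maxTermSeen;
    private long currentTerm;

    void updateMaxTermSeen(long term) {
        synchronized (mutex) {
            maxTermSeen = Math.max(maxTermSeen, term);
            if (maxTermSeen > currentTerm) {
                // In the real implementation this is where the node would consider bumping its term
                // (and, if it is the leader, handling the fact that a newer term exists in the cluster).
                System.out.println("term " + maxTermSeen + " is ahead of current term " + currentTerm);
            }
        }
    }

    public static void main(String[] args) {
        MaxTermTracker tracker = new MaxTermTracker();
        tracker.currentTerm = 3;
        tracker.updateMaxTermSeen(2); // ratchets maxTermSeen, but not ahead of the current term
        tracker.updateMaxTermSeen(5); // ahead of the current term, so the node must react
    }
}
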
+ */ private void processJoinRequest(JoinRequest joinRequest, ActionListener joinListener) { assert Transports.assertNotTransportThread("blocking on coordinator mutex and maybe doing IO to increase term"); final Optional optionalJoin = joinRequest.getOptionalJoin(); diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java index 887a0d7d5a751..ad957f7a8f37f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java @@ -15,94 +15,25 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public final class ClusterIndexHealth implements Writeable, ToXContentFragment { - private static final String STATUS = "status"; - private static final String NUMBER_OF_SHARDS = "number_of_shards"; - private static final String NUMBER_OF_REPLICAS = "number_of_replicas"; - private static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; - private static final String ACTIVE_SHARDS = "active_shards"; - private static final String RELOCATING_SHARDS = "relocating_shards"; - private static final String INITIALIZING_SHARDS = "initializing_shards"; - private static final String UNASSIGNED_SHARDS = "unassigned_shards"; - private static final String SHARDS = "shards"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_index_health", - true, - (parsedObjects, index) -> { - int i = 0; - int numberOfShards = (int) parsedObjects[i++]; - int numberOfReplicas = (int) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - int activePrimaryShards = (int) parsedObjects[i++]; - String statusStr = (String) parsedObjects[i++]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - @SuppressWarnings("unchecked") - List shardList = (List) parsedObjects[i]; - final Map shards; - if (shardList == null || shardList.isEmpty()) { - shards = emptyMap(); - } else { - shards = Maps.newMapWithExpectedSize(shardList.size()); - for (ClusterShardHealth shardHealth : shardList) { - shards.put(shardHealth.getShardId(), shardHealth); - } - } - return new ClusterIndexHealth( - index, - numberOfShards, - numberOfReplicas, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - activePrimaryShards, - status, - shards - ); - } - ); - - 
public static final ObjectParser.NamedObjectParser SHARD_PARSER = ( - XContentParser p, - String indexIgnored, - String shardId) -> ClusterShardHealth.innerFromXContent(p, Integer.valueOf(shardId)); - - static { - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_REPLICAS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_PRIMARY_SHARDS)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - // Can be absent if LEVEL == 'indices' or 'cluster' - PARSER.declareNamedObjects(optionalConstructorArg(), SHARD_PARSER, new ParseField(SHARDS)); - } + static final String STATUS = "status"; + static final String NUMBER_OF_SHARDS = "number_of_shards"; + static final String NUMBER_OF_REPLICAS = "number_of_replicas"; + static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String SHARDS = "shards"; private final String index; private final int numberOfShards; @@ -280,20 +211,6 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa return builder; } - public static ClusterIndexHealth innerFromXContent(XContentParser parser, String index) { - return PARSER.apply(parser, index); - } - - public static ClusterIndexHealth fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String index = parser.currentName(); - ClusterIndexHealth parsed = innerFromXContent(parser, index); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return parsed; - } - @Override public String toString() { return "ClusterIndexHealth{" diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java index b3aa4275f7be7..785b0db5cc807 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java @@ -17,59 +17,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public final class ClusterShardHealth implements Writeable, ToXContentFragment { - private static 
final String STATUS = "status"; - private static final String ACTIVE_SHARDS = "active_shards"; - private static final String RELOCATING_SHARDS = "relocating_shards"; - private static final String INITIALIZING_SHARDS = "initializing_shards"; - private static final String UNASSIGNED_SHARDS = "unassigned_shards"; - private static final String PRIMARY_ACTIVE = "primary_active"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_shard_health", - true, - (parsedObjects, shardId) -> { - int i = 0; - boolean primaryActive = (boolean) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - String statusStr = (String) parsedObjects[i]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - return new ClusterShardHealth( - shardId, - status, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - primaryActive - ); - } - ); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField(PRIMARY_ACTIVE)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - } + static final String STATUS = "status"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String PRIMARY_ACTIVE = "primary_active"; private final int shardId; private final ClusterHealthStatus status; @@ -230,20 +191,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - static ClusterShardHealth innerFromXContent(XContentParser parser, Integer shardId) { - return PARSER.apply(parser, shardId); - } - - public static ClusterShardHealth fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String shardIdStr = parser.currentName(); - ClusterShardHealth parsed = innerFromXContent(parser, Integer.valueOf(shardIdStr)); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return parsed; - } - @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java index d3d758e110ff3..a11ec64dc6f2c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java @@ -163,17 +163,21 @@ public String toString() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the component template to XContent and passes the RolloverConditions, when provided, to 
the template. */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(TEMPLATE.getPreferredName()); - this.template.toXContent(builder, params, rolloverConfiguration); + this.template.toXContent(builder, params, rolloverConfiguration, globalRetention); if (this.version != null) { builder.field(VERSION.getPreferredName(), this.version); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 7702ec0ac0b5c..8e8e6fff4cc6a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -259,19 +259,23 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the composable index template to XContent and passes the RolloverConditions, when provided, to the template. */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.stringListField(INDEX_PATTERNS.getPreferredName(), this.indexPatterns); if (this.template != null) { builder.field(TEMPLATE.getPreferredName()); - this.template.toXContent(builder, params, rolloverConfiguration); + this.template.toXContent(builder, params, rolloverConfiguration, globalRetention); } if (this.componentTemplates != null) { builder.stringListField(COMPOSED_OF.getPreferredName(), this.componentTemplates); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java index a8b094bafde2e..6db7b2cf670bc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; @@ -53,6 +54,14 @@ public class DataStreamLifecycle implements SimpleDiffable, public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_10_X; public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; + // The following XContent params are used to enrich the DataStreamLifecycle json with effective retention information + // This should be set only when the lifecycle is used in a response to the user and NEVER when we expect the json to + // be deserialized. 
+ public static final String INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME = "include_effective_retention"; + public static final Map INCLUDE_EFFECTIVE_RETENTION_PARAMS = Map.of( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME, + "true" + ); /** * Check if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set to {@code true}, indicating that @@ -79,6 +88,8 @@ public static boolean isDataStreamsLifecycleOnlyMode(final Settings settings) { public static final ParseField ENABLED_FIELD = new ParseField("enabled"); public static final ParseField DATA_RETENTION_FIELD = new ParseField("data_retention"); + public static final ParseField EFFECTIVE_RETENTION_FIELD = new ParseField("effective_retention"); + public static final ParseField RETENTION_SOURCE_FIELD = new ParseField("retention_determined_by"); public static final ParseField DOWNSAMPLING_FIELD = new ParseField("downsampling"); private static final ParseField ROLLOVER_FIELD = new ParseField("rollover"); @@ -130,17 +141,6 @@ public boolean isEnabled() { return enabled; } - /** - * The least amount of time data should be kept by elasticsearch. - * @return the time period or null, null represents that data should never be deleted. - * @deprecated use {@link #getEffectiveDataRetention(DataStreamGlobalRetention)} - */ - @Deprecated - @Nullable - public TimeValue getEffectiveDataRetention() { - return getEffectiveDataRetention(null); - } - /** * The least amount of time data should be kept by elasticsearch. * @return the time period or null, null represents that data should never be deleted. @@ -275,17 +275,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } /** - * Converts the data stream lifecycle to XContent and injects the RolloverConditions if they exist. - * @deprecated use {@link #toXContent(XContentBuilder, Params, RolloverConfiguration, DataStreamGlobalRetention)} - */ - @Deprecated - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { - return toXContent(builder, params, rolloverConfiguration, null); - } - - /** - * Converts the data stream lifecycle to XContent and injects the RolloverConditions and the global retention if they exist. + * Converts the data stream lifecycle to XContent, enriches it with effective retention information when requested + * and injects the RolloverConditions if they exist. + * In order to request the effective retention you need to set {@link #INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME} to true + * in the XContent params. 
*/ public XContentBuilder toXContent( XContentBuilder builder, @@ -302,6 +295,14 @@ public XContentBuilder toXContent( builder.field(DATA_RETENTION_FIELD.getPreferredName(), dataRetention.value().getStringRep()); } } + if (params.paramAsBoolean(INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME, false)) { + Tuple effectiveRetention = getEffectiveDataRetentionWithSource(globalRetention); + if (effectiveRetention.v1() != null) { + builder.field(EFFECTIVE_RETENTION_FIELD.getPreferredName(), effectiveRetention.v1().getStringRep()); + builder.field(RETENTION_SOURCE_FIELD.getPreferredName(), effectiveRetention.v2().displayName()); + } + } + if (downsampling != null) { builder.field(DOWNSAMPLING_FIELD.getPreferredName()); downsampling.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index b450251ff7e3f..f424861c5b7ff 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.Transports; @@ -921,7 +922,7 @@ private void findAliasInfo(final String[] aliases, final String[] possibleMatche */ public Map findMappings( String[] concreteIndices, - Function> fieldFilter, + Function> fieldFilter, Runnable onNextIndex ) { assert Transports.assertNotTransportThread("decompressing mappings is too expensive for a transport thread"); @@ -974,7 +975,7 @@ private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Pre if (mappingMetadata == null) { return MappingMetadata.EMPTY_MAPPINGS; } - if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + if (fieldPredicate == FieldPredicate.ACCEPT_ALL) { return mappingMetadata; } Map sourceAsMap = XContentHelper.convertToMap(mappingMetadata.source().compressedReference(), true).v2(); @@ -997,7 +998,7 @@ private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Pre @SuppressWarnings("unchecked") private static boolean filterFields(String currentPath, Map fields, Predicate fieldPredicate) { - assert fieldPredicate != MapperPlugin.NOOP_FIELD_PREDICATE; + assert fieldPredicate != FieldPredicate.ACCEPT_ALL; Iterator> entryIterator = fields.entrySet().iterator(); while (entryIterator.hasNext()) { Map.Entry entry = entryIterator.next(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java index 18a99f984707f..74627e27032b4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java @@ -213,14 +213,18 @@ public String toString() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the template to XContent and passes the RolloverConditions, when provided, to the lifecycle. 
*/ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); if (this.settings != null) { builder.startObject(SETTINGS.getPreferredName()); @@ -250,7 +254,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla } if (this.lifecycle != null) { builder.field(LIFECYCLE.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + lifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index e40ca70460b13..749773cd91eb8 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -197,6 +197,7 @@ public InputStream readBlob(OperationPurpose purpose, String blobName, long posi assert BlobContainer.assertPurposeConsistency(purpose, blobName); final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { + assert position < channel.size() : "reading from " + position + " exceeds file length " + channel.size(); channel.position(position); } assert channel.position() == position; diff --git a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java index ea8eadd66acaa..165280e370025 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java +++ b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java @@ -15,11 +15,13 @@ import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; +import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.BooleanSupplier; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; +import java.util.function.Supplier; import java.util.function.ToIntFunction; public class Iterators { @@ -56,7 +58,7 @@ public static Iterator concat(Iterator... iterators) { for (int i = 0; i < iterators.length; i++) { if (iterators[i].hasNext()) { // explicit generic type argument needed for type inference - return new ConcatenatedIterator(iterators, i); + return new ConcatenatedIterator<>(iterators, i); } } @@ -258,6 +260,103 @@ public T next() { } } + /** + * Enumerates the elements of an iterator together with their index, using a function to combine the pair together into the final items + * produced by the iterator. + *

+     * An example of its usage to enumerate a list of names together with their positional index in the list:
+     * <pre>{@code
+     * Iterator<String> nameIterator = ...;
+     * Iterator<Tuple<Integer, String>> enumeratedNames = Iterators.enumerate(nameIterator, Tuple::new);
+     * enumeratedNames.forEachRemaining(tuple -> System.out.println("Index: " + tuple.v1() + ", Name: " + tuple.v2()));
+     * }</pre>
+ * + * @param input The iterator to wrap + * @param fn A function that takes the index for an entry and the entry itself, returning an item that combines them together + * @return An iterator that combines elements together with their indices in the underlying collection + * @param The object type contained in the original iterator + * @param The object type that results from combining the original entry with its index in the iterator + */ + public static Iterator enumerate(Iterator input, BiFunction fn) { + return new EnumeratingIterator<>(Objects.requireNonNull(input), Objects.requireNonNull(fn)); + } + + private static class EnumeratingIterator implements Iterator { + private final Iterator input; + private final BiFunction fn; + + private int idx = 0; + + EnumeratingIterator(Iterator input, BiFunction fn) { + this.input = input; + this.fn = fn; + } + + @Override + public boolean hasNext() { + return input.hasNext(); + } + + @Override + public U next() { + return fn.apply(idx++, input.next()); + } + + @Override + public void forEachRemaining(Consumer action) { + input.forEachRemaining(t -> action.accept(fn.apply(idx++, t))); + } + } + + /** + * Adapts a {@link Supplier} object into an iterator. The resulting iterator will return values from the delegate Supplier until the + * delegate returns a null value. Once the delegate returns null, the iterator will claim to be empty. + *

+     * An example of its usage to iterate over a queue while draining it at the same time:
+     * <pre>{@code
+     *     LinkedList<String> names = ...;
+     *     assert names.size() != 0;
+     *
+     *     Iterator<String> nameIterator = Iterators.fromSupplier(names::pollFirst);
+     *     nameIterator.forEachRemaining(System.out::println);
+     *     assert names.size() == 0;
+     * }</pre>
+ * + * @param input A {@link Supplier} that returns null when no more elements should be returned from the iterator + * @return An iterator that returns elements by calling the supplier until a null value is returned + * @param The object type returned from the supplier function + */ + public static Iterator fromSupplier(Supplier input) { + return new SupplierIterator<>(Objects.requireNonNull(input)); + } + + private static final class SupplierIterator implements Iterator { + private final Supplier fn; + private T head; + + SupplierIterator(Supplier fn) { + this.fn = fn; + this.head = fn.get(); + } + + @Override + public boolean hasNext() { + return head != null; + } + + @Override + public T next() { + if (head == null) { + throw new NoSuchElementException(); + } + T next = head; + head = fn.get(); + return next; + } + } + public static boolean equals(Iterator iterator1, Iterator iterator2, BiPredicate itemComparer) { if (iterator1 == null) { return iterator2 == null; diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 21801dee844b0..ac5255f58622a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -313,6 +313,7 @@ public void apply(Settings value, Settings current, Settings previous) { HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE, HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH, HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD, + HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD, HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT, HttpTransportSettings.SETTING_HTTP_RESET_COOKIES, HttpTransportSettings.SETTING_HTTP_TCP_NO_DELAY, diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index aa1c25a3f1952..1b3173395791c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -626,6 +626,13 @@ String innerGetRaw(final Settings settings) { return defaultValue.apply(settings); } + /** + * Returns the raw (string) settings value, which is for logging use + */ + String getLogString(final Settings settings) { + return getRaw(settings); + } + /** Logs a deprecation warning if the setting is deprecated and used. */ void checkDeprecation(Settings settings) { // They're using the setting, so we need to tell them to stop @@ -881,6 +888,18 @@ private Stream matchStream(Settings settings) { return settings.keySet().stream().filter(this::match).map(key::getConcreteString); } + @Override + public boolean exists(Settings settings) { + // concrete settings might be secure, so don't exclude these here + return key.exists(settings.keySet(), Collections.emptySet()); + } + + @Override + public boolean exists(Settings.Builder builder) { + // concrete settings might be secure, so don't exclude these here + return key.exists(builder.keys(), Collections.emptySet()); + } + /** * Get the raw list of dependencies. This method is exposed for testing purposes and {@link #getSettingsDependencies(String)} * should be preferred for most all cases. 
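// A minimal usage sketch of the two helpers added in the Iterators.java hunk above
// (fromSupplier and enumerate). The class name, queue contents, and printed output
// are illustrative assumptions; only the Iterators calls come from the change itself.
import org.elasticsearch.common.collect.Iterators;

import java.util.ArrayDeque;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;

class IteratorsUsageSketch {
    public static void main(String[] args) {
        Queue<String> names = new ArrayDeque<>(List.of("alpha", "beta", "gamma"));

        // fromSupplier: keeps returning elements until poll() yields null, draining the queue as it goes.
        Iterator<String> drained = Iterators.fromSupplier(names::poll);

        // enumerate: pairs every element with its positional index via the BiFunction.
        Iterator<String> numbered = Iterators.enumerate(drained, (index, name) -> index + ": " + name);
        numbered.forEachRemaining(System.out::println); // 0: alpha, 1: beta, 2: gamma

        assert names.isEmpty();
    }
}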
@@ -989,6 +1008,7 @@ public Map getValue(Settings current, Settings previous) { @Override public void apply(Map value, Settings current, Settings previous) { + Setting.logSettingUpdate(AffixSetting.this, current, previous, logger); consumer.accept(value); } }; @@ -1008,6 +1028,20 @@ public String innerGetRaw(final Settings settings) { ); } + @Override + String getLogString(final Settings settings) { + Settings filteredAffixSetting = settings.filter(this::match); + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + filteredAffixSetting.toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); + builder.endObject(); + return Strings.toString(builder); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + @Override public Setting getConcreteSetting(String key) { if (match(key)) { @@ -1811,7 +1845,7 @@ static void logSettingUpdate(Setting setting, Settings current, Settings prev if (setting.isFiltered()) { logger.info("updating [{}]", setting.key); } else { - logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getRaw(previous), setting.getRaw(current)); + logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getLogString(previous), setting.getLogString(current)); } } } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 80811fbbd1b80..7dae11fb8d720 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -1538,13 +1538,13 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p */ private static final DateFormatter DATE = newDateFormatter( "date", - DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.STRICT), + DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT), DATE_FORMATTER ); // only the formatter, nothing optional here private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder().append( - DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT) + DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.LENIENT) ) .appendLiteral('T') .appendPattern("HH:mm") diff --git a/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java index 7c37f4a3c569e..a40fee58ceeb2 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java @@ -9,7 +9,6 @@ package org.elasticsearch.common.time; import java.time.ZoneId; -import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; import java.util.Locale; @@ -23,14 +22,10 @@ interface DateTimeParser { Locale getLocale(); - String getFormatString(); - DateTimeParser withZone(ZoneId zone); DateTimeParser withLocale(Locale locale); - void applyToBuilder(DateTimeFormatterBuilder builder); - /** * Parses the specified string. *

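// A minimal background sketch of the "round up" parsing technique that the EpochTime and
// JavaDateFormatter hunks below rebuild around parseDefaulting: a base formatter is appended
// to a builder and the missing fields are defaulted to their maxima, so a bare date resolves
// to the end of the period when used as the upper bound of a range query. The pattern and the
// exact fields defaulted here are illustrative assumptions, not the production defaults.
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.util.Locale;

class RoundUpParsingSketch {
    public static void main(String[] args) {
        DateTimeFormatter roundUp = new DateTimeFormatterBuilder().append(DateTimeFormatter.ISO_LOCAL_DATE)
            .parseDefaulting(ChronoField.HOUR_OF_DAY, 23)
            .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 59)
            .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 59)
            .parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999)
            .toFormatter(Locale.ROOT);

        // Missing time-of-day fields are filled in with the defaults above.
        System.out.println(LocalDateTime.parse("2024-05-15", roundUp)); // 2024-05-15T23:59:59.999999999
    }
}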
diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java index 2c8ef4e48411f..6b80aa9abf6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java +++ b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java @@ -252,7 +252,7 @@ public long getFrom(TemporalAccessor temporal) { static final DateFormatter SECONDS_FORMATTER = new JavaDateFormatter( "epoch_second", new JavaTimeDateTimePrinter(SECONDS_FORMATTER1), - (builder, parser) -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L), + JavaTimeDateTimeParser.createRoundUpParserGenerator(builder -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L)), new JavaTimeDateTimeParser(SECONDS_FORMATTER1), new JavaTimeDateTimeParser(SECONDS_FORMATTER2) ); @@ -260,7 +260,7 @@ public long getFrom(TemporalAccessor temporal) { static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter( "epoch_millis", new JavaTimeDateTimePrinter(MILLISECONDS_FORMATTER1), - (builder, parser) -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L), + JavaTimeDateTimeParser.createRoundUpParserGenerator(builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L)), new JavaTimeDateTimeParser(MILLISECONDS_FORMATTER1), new JavaTimeDateTimeParser(MILLISECONDS_FORMATTER2) ); diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index c3a25cb4e15b5..9c39ee51276d7 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -17,29 +17,40 @@ import java.time.temporal.IsoFields; import java.time.temporal.TemporalAccessor; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.function.BiConsumer; import java.util.function.UnaryOperator; class JavaDateFormatter implements DateFormatter { + @SuppressWarnings("unchecked") + private static T defaultRoundUp(T parser) { + if (parser instanceof JavaTimeDateTimeParser jtp) { + return (T) defaultRoundUp(jtp); + } + throw new IllegalArgumentException("Unknown parser implementation " + parser.getClass()); + } + /** - * A default consumer that allows to round up fields (used for range searches, optional fields missing) - * it relies on toString implementation of DateTimeFormatter and ChronoField. - * For instance for pattern - * the parser would have a toString() - * - * Value(MonthOfYear,2)'/'Value(DayOfMonth,2)'/'Value(YearOfEra,4,19,EXCEEDS_PAD)' - * 'Value(ClockHourOfAmPm,2)':'Value(MinuteOfHour,2)' 'Text(AmPmOfDay,SHORT) - * - * and ChronoField.CLOCK_HOUR_OF_AMPM would have toString() ClockHourOfAmPm - * this allows the rounding logic to default CLOCK_HOUR_OF_AMPM field instead of HOUR_OF_DAY - * without this logic, the rounding would result in a conflict as HOUR_OF_DAY would be missing, but CLOCK_HOUR_OF_AMPM would be provided - */ - private static final BiConsumer DEFAULT_ROUND_UP = (builder, parser) -> { - String parserAsString = parser.getFormatString(); + * A default transform that allows to round up fields (used for range searches, optional fields missing) + * it relies on toString implementation of DateTimeFormatter and ChronoField. 
+ * For instance for pattern + * the parser would have a toString() + * + * Value(MonthOfYear,2)'/'Value(DayOfMonth,2)'/'Value(YearOfEra,4,19,EXCEEDS_PAD)' + * 'Value(ClockHourOfAmPm,2)':'Value(MinuteOfHour,2)' 'Text(AmPmOfDay,SHORT) + * + * and ChronoField.CLOCK_HOUR_OF_AMPM would have toString() ClockHourOfAmPm + * this allows the rounding logic to default CLOCK_HOUR_OF_AMPM field instead of HOUR_OF_DAY + * without this logic, the rounding would result in a conflict as HOUR_OF_DAY would be missing, but CLOCK_HOUR_OF_AMPM would be provided + */ + private static JavaTimeDateTimeParser defaultRoundUp(JavaTimeDateTimeParser parser) { + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + builder.append(parser.formatter()); + + String parserAsString = parser.formatter().toString(); if (parserAsString.contains(ChronoField.DAY_OF_YEAR.toString())) { builder.parseDefaulting(ChronoField.DAY_OF_YEAR, 1L); // TODO ideally we should make defaulting for weekbased year here too, @@ -63,7 +74,9 @@ class JavaDateFormatter implements DateFormatter { builder.parseDefaulting(ChronoField.MINUTE_OF_HOUR, 59L); builder.parseDefaulting(ChronoField.SECOND_OF_MINUTE, 59L); builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L); - }; + + return new JavaTimeDateTimeParser(builder.toFormatter(parser.getLocale())); + } private final String format; private final DateTimePrinter printer; @@ -72,38 +85,38 @@ class JavaDateFormatter implements DateFormatter { // named formatters use default roundUpParser JavaDateFormatter(String format, DateTimePrinter printer, DateTimeParser... parsers) { - this( - format, - printer, - // set up base fields which should be used for default parsing, when we round up for date math - DEFAULT_ROUND_UP, - parsers - ); + this(format, printer, JavaDateFormatter::defaultRoundUp, parsers); } - JavaDateFormatter( + @SafeVarargs + @SuppressWarnings("varargs") // parsers array is read-only, and not stored in any fields, so this is safe + JavaDateFormatter( String format, DateTimePrinter printer, - BiConsumer roundupParserConsumer, - DateTimeParser... parsers + UnaryOperator generateRoundUpParser, + T... 
parsers ) { + if (format.contains("||")) { + throw new IllegalArgumentException("This class cannot handle multiple format specifiers"); + } if (printer == null) { throw new IllegalArgumentException("printer may not be null"); } if (parsers.length == 0) { throw new IllegalArgumentException("parsers need to be specified"); } + verifyPrinterParsers(printer, parsers); + this.printer = printer; this.format = format; - this.parsers = parsersArray(parsers); - this.roundupParsers = createRoundUpParsers(format, roundupParserConsumer, locale(), this.parsers); + this.parsers = Arrays.copyOf(parsers, parsers.length, DateTimeParser[].class); + this.roundupParsers = mapParsers(generateRoundUpParser, parsers); } - private static DateTimeParser[] parsersArray(DateTimeParser[] parsers) { - final ZoneId zoneId = parsers[0].getZone(); - final Locale locale = parsers[0].getLocale(); - for (int i = 1; i < parsers.length; i++) { - final DateTimeParser parser = parsers[i]; + private static void verifyPrinterParsers(DateTimePrinter printer, DateTimeParser[] parsers) { + ZoneId zoneId = printer.getZone(); + Locale locale = printer.getLocale(); + for (DateTimeParser parser : parsers) { if (Objects.equals(parser.getZone(), zoneId) == false) { throw new IllegalArgumentException("formatters must have the same time zone"); } @@ -111,31 +124,6 @@ private static DateTimeParser[] parsersArray(DateTimeParser[] parsers) { throw new IllegalArgumentException("formatters must have the same locale"); } } - return parsers; - } - - /** - * This is when the RoundUp Formatters are created. In further merges (with ||) it will only append them to a list. - * || is not expected to be provided as format when a RoundUp formatter is created. It will be splitted before in - * DateFormatter.forPattern - * JavaDateFormatter created with a custom format like DateFormatter.forPattern("YYYY") will only have one parser - * It is however possible to have a JavaDateFormatter with multiple parsers. For instance see a "date_time" formatter in - * DateFormatters. - * This means that we need to also have multiple RoundUp parsers. - */ - private static DateTimeParser[] createRoundUpParsers( - String format, - BiConsumer roundupParserConsumer, - Locale locale, - DateTimeParser[] parsers - ) { - assert format.contains("||") == false; - return mapObjects(parser -> { - DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); - parser.applyToBuilder(builder); - roundupParserConsumer.accept(builder, parser); - return new JavaTimeDateTimeParser(builder.toFormatter(locale)); - }, parsers); } static DateFormatter combined(String input, List formatters) { @@ -232,13 +220,14 @@ private JavaDateFormatter mapParsers(UnaryOperator printerMappi return new JavaDateFormatter( format, printerMapping.apply(printer), - mapObjects(parserMapping, this.roundupParsers), - mapObjects(parserMapping, this.parsers) + mapParsers(parserMapping, this.roundupParsers), + mapParsers(parserMapping, this.parsers) ); } - private static T[] mapObjects(UnaryOperator mapping, T[] objects) { - T[] res = objects.clone(); + @SafeVarargs + private static DateTimeParser[] mapParsers(UnaryOperator mapping, T... 
objects) { + DateTimeParser[] res = new DateTimeParser[objects.length]; for (int i = 0; i < objects.length; i++) { res[i] = mapping.apply(objects[i]); } diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java index c473b81771a53..793b97b3fa472 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java @@ -15,15 +15,30 @@ import java.time.temporal.TemporalAccessor; import java.util.Locale; import java.util.Optional; +import java.util.function.Consumer; +import java.util.function.UnaryOperator; class JavaTimeDateTimeParser implements DateTimeParser { + static UnaryOperator createRoundUpParserGenerator(Consumer modifyBuilder) { + return p -> { + var builder = new DateTimeFormatterBuilder(); + builder.append(p.formatter); + modifyBuilder.accept(builder); + return new JavaTimeDateTimeParser(builder.toFormatter(p.getLocale())); + }; + } + private final DateTimeFormatter formatter; JavaTimeDateTimeParser(DateTimeFormatter formatter) { this.formatter = formatter; } + DateTimeFormatter formatter() { + return formatter; + } + @Override public ZoneId getZone() { return formatter.getZone(); @@ -34,11 +49,6 @@ public Locale getLocale() { return formatter.getLocale(); } - @Override - public String getFormatString() { - return formatter.toString(); - } - @Override public DateTimeParser withZone(ZoneId zone) { return new JavaTimeDateTimeParser(formatter.withZone(zone)); @@ -49,11 +59,6 @@ public DateTimeParser withLocale(Locale locale) { return new JavaTimeDateTimeParser(formatter.withLocale(locale)); } - @Override - public void applyToBuilder(DateTimeFormatterBuilder builder) { - builder.append(formatter); - } - @Override public TemporalAccessor parse(CharSequence str) { return formatter.parse(str); diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index 1b46e71dadd12..fc911793711b7 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -331,8 +331,7 @@ public static Map transformValues(Map source, Function /** * An immutable implementation of {@link Map.Entry}. - * @param key key - * @param value value + * Unlike {@code Map.entry(...)} this implementation permits null key and value. */ public record ImmutableEntry(KType key, VType value) implements Map.Entry { diff --git a/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java new file mode 100644 index 0000000000000..2b444741dd363 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.core.Releasables; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * A hash table from objects to objects. 
This implementation resolves collisions + * with open addressing and linear probing and does not support null values. + * This class is not thread-safe. + * + * Note that this class does not track either the actual keys or values. It is responsibility of + * the caller to release those objects if necessary. + */ +public final class ObjectObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { + + private ObjectArray keys; + private ObjectArray values; + + public ObjectObjectPagedHashMap(long capacity, BigArrays bigArrays) { + this(capacity, DEFAULT_MAX_LOAD_FACTOR, bigArrays); + } + + public ObjectObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) { + super(capacity, maxLoadFactor, bigArrays); + keys = bigArrays.newObjectArray(capacity()); + boolean success = false; + try { + values = bigArrays.newObjectArray(capacity()); + success = true; + } finally { + if (false == success) { + close(); + } + } + } + + /** + * Get the value that is associated with key or null if key + * was not present in the hash table. + */ + public V get(K key) { + final long slot = slot(key.hashCode(), mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V value = values.get(index); + if (value == null) { + return null; + } else if (keys.get(index).equals(key)) { + return value; + } + } + } + + /** + * Put this new (key, value) pair into this hash table and return the value + * that was previously associated with key or null in case of + * an insertion. + */ + public V put(K key, V value) { + if (size >= maxSize) { + assert size == maxSize; + grow(); + } + assert size < maxSize; + return set(key, key.hashCode(), value); + } + + /** + * Remove the entry which has this key in the hash table and return the + * associated value or null if there was no entry associated with this key. 
+ */ + public V remove(K key) { + final long slot = slot(key.hashCode(), mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V previous = values.set(index, null); + if (previous == null) { + return null; + } else if (keys.get(index).equals(key)) { + --size; + for (long j = nextSlot(index, mask); used(j); j = nextSlot(j, mask)) { + removeAndAdd(j); + } + return previous; + } else { + // repair and continue + values.set(index, previous); + } + } + } + + private V set(K key, int code, V value) { + assert key.hashCode() == code; + assert value != null; + assert size < maxSize; + final long slot = slot(code, mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V previous = values.set(index, value); + if (previous == null) { + // slot was free + keys.set(index, key); + ++size; + return null; + } else if (key.equals(keys.get(index))) { + // we just updated the value + return previous; + } else { + // not the right key, repair and continue + values.set(index, previous); + } + } + } + + @Override + public Iterator> iterator() { + return new Iterator<>() { + + boolean cached; + final Cursor cursor; + { + cursor = new Cursor<>(); + cursor.index = -1; + cached = false; + } + + @Override + public boolean hasNext() { + if (cached == false) { + while (true) { + ++cursor.index; + if (cursor.index >= capacity()) { + break; + } else if (used(cursor.index)) { + cursor.key = keys.get(cursor.index); + cursor.value = values.get(cursor.index); + break; + } + } + cached = true; + } + return cursor.index < capacity(); + } + + @Override + public Cursor next() { + if (hasNext() == false) { + throw new NoSuchElementException(); + } + cached = false; + return cursor; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + }; + } + + @Override + public void close() { + Releasables.close(keys, values); + } + + @Override + protected void resize(long capacity) { + keys = bigArrays.resize(keys, capacity); + values = bigArrays.resize(values, capacity); + } + + @Override + protected boolean used(long bucket) { + return values.get(bucket) != null; + } + + @Override + protected void removeAndAdd(long index) { + final K key = keys.get(index); + final V value = values.set(index, null); + --size; + final V removed = set(key, key.hashCode(), value); + assert removed == null; + } + + public static final class Cursor { + public long index; + public K key; + public V value; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java index adfa5023f2b35..75e5717d41b9f 100644 --- a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java +++ b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java @@ -40,7 +40,7 @@ public static HashSet newHashSet(T... elements) { return new HashSet<>(Arrays.asList(elements)); } - public static Set newHashSetWithExpectedSize(int expectedSize) { + public static HashSet newHashSetWithExpectedSize(int expectedSize) { return new HashSet<>(capacity(expectedSize)); } @@ -53,7 +53,17 @@ static int capacity(int expectedSize) { return expectedSize < 2 ? 
expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0); } - public static boolean haveEmptyIntersection(Set left, Set right) { + public static boolean haveEmptyIntersection(Set set1, Set set2) { + final Set left; + final Set right; + if (set1.size() < set2.size()) { + left = set1; + right = set2; + } else { + left = set2; + right = set1; + } + for (T t : left) { if (right.contains(t)) { return false; @@ -95,7 +105,7 @@ public static Set difference(Set left, Set right) { * @param the type of the elements of the sets * @return the sorted relative complement of the left set with respect to the right set */ - public static SortedSet sortedDifference(final Set left, final Set right) { + public static > SortedSet sortedDifference(final Set left, final Set right) { final SortedSet set = new TreeSet<>(); for (T k : left) { if (right.contains(k) == false) { @@ -165,11 +175,12 @@ public static Set intersection(Set set1, Set set2) { * * @param set set to copy * @param elements elements to add + * @return the unmodifiable copy of the input set with the extra elements added */ @SuppressWarnings("unchecked") public static Set addToCopy(Set set, E... elements) { final var res = new HashSet<>(set); Collections.addAll(res, elements); - return Set.copyOf(res); + return (Set) Set.of(res.toArray()); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index 5ff147a11a06a..3fde94d559338 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -205,11 +205,6 @@ public void clusterChanged(ClusterChangedEvent event) { && currentMasterNode != null; if (prerequisitesFulfilled == false || healthNodeChanged || masterNodeChanged) { stopMonitoring(); - // Reset the reference of each HealthTracker. - // By doing this after `stopMonitoring()`, we're sure the `Monitoring` instance has been cancelled and therefore won't - // touch the `lastReportedValue` of the health trackers after we've reset them (only the new `Monitoring` instance will - // be able to update them). - healthTrackers.forEach(HealthTracker::reset); } if (prerequisitesFulfilled) { startMonitoringIfNecessary(); @@ -256,6 +251,7 @@ static class Monitoring implements Runnable, Scheduler.Cancellable { private final AtomicBoolean inFlightRequest; private volatile boolean cancelled = false; + private volatile boolean fistRun = true; private volatile Scheduler.ScheduledCancellable scheduledRun; private Monitoring( @@ -323,54 +319,56 @@ public void run() { return; } // Before we do anything, we're first going to make sure there is no in-flight request at this moment. - // If that's the case, we'll acquire the "lock", which prevents any other thread/instance from sending any requests. + // If that's the case, we'll acquire the "lock", which prevents any other threads/instances from sending any requests + // and writing to the health trackers' states. if (inFlightRequest.compareAndSet(false, true) == false) { logger.debug("Not allowed to send health info update request due to in-flight request, will try again."); + // Since we weren't able to acquire the lock, we don't need to release it, and we can schedule the next run right away. 
scheduleNextRunIfNecessary(); return; } - boolean nextRunScheduled = false; - Runnable releaseAndScheduleNextRun = new RunOnce(() -> { - inFlightRequest.set(false); - scheduleNextRunIfNecessary(); - }); try { - List> healthProgresses = getHealthProgresses(); - if (healthProgresses.isEmpty()) { - // Next run will still be scheduled in the `finally` block. + // On the first run, we're resetting all the health trackers. When we're in a first run, we either got restarted + // (health/master node change, manual restart, etc.) or we're actually starting the whole LocalHealthMonitor for + // the first time. In either case, we want to make sure we're (re)sending all the info to the health node, hence the reset. + // We're doing it _here_, so that we've acquired the inFlightRequest lock before we make any changes to the health trackers. + if (fistRun) { + healthTrackers.forEach(HealthTracker::reset); + fistRun = false; + } + List> changedHealthTrackers = getChangedHealthTrackers(); + if (changedHealthTrackers.isEmpty()) { + releaseAndScheduleNextRun(); return; } + // Create builder and add the current value of each (changed) health tracker to the request. var builder = new UpdateHealthInfoCacheAction.Request.Builder().nodeId(clusterService.localNode().getId()); - healthProgresses.forEach(changedHealthInfo -> changedHealthInfo.updateRequestBuilder(builder)); - - var listener = ActionListener.wrap(response -> { - // Only record health progress if this monitoring instance hasn't been cancelled in the meantime. - // This avoids any unwanted writes to the HealthTrackers' states after a new monitoring instance has possibly - // already started. - if (cancelled == false) { - healthProgresses.forEach(HealthTracker.HealthProgress::recordProgressIfRelevant); - } - }, e -> { + changedHealthTrackers.forEach(changedHealthTracker -> changedHealthTracker.addToRequestBuilder(builder)); + + // We don't need to do anything with the response when the request was successful, as HealthTracker#checkHealthChanged has + // already updated it's internal state. + var listener = ActionListener.wrap(response -> {}, e -> { if (e.getCause() instanceof NodeNotConnectedException || e.getCause() instanceof HealthNodeNotDiscoveredException) { logger.debug("Failed to connect to the health node [{}], will try again.", e.getCause().getMessage()); } else { logger.debug(() -> format("Failed to send health info to health node, will try again."), e); } + // If anything went wrong, we're going to reset the changed trackers to make + // sure their health will get reported in the next iteration. + changedHealthTrackers.forEach(HealthTracker::reset); }); client.execute( UpdateHealthInfoCacheAction.INSTANCE, builder.build(), - ActionListener.runAfter(listener, releaseAndScheduleNextRun) + ActionListener.runAfter(listener, new RunOnce(this::releaseAndScheduleNextRun)) ); - nextRunScheduled = true; } catch (Exception e) { logger.warn(() -> format("Failed to run scheduled health monitoring on thread pool [%s]", executor), e); - } finally { - // If the next run isn't scheduled because for example the health info hasn't changed, we schedule it here. - if (nextRunScheduled == false) { - releaseAndScheduleNextRun.run(); - } + // If anything went wrong, we're going to reset all the trackers to make + // sure their health will get reported in the next iteration. + healthTrackers.forEach(HealthTracker::reset); + releaseAndScheduleNextRun(); } } @@ -379,17 +377,19 @@ public void run() { * * @return a list of changed health info's. 
*/ - private List> getHealthProgresses() { + private List> getChangedHealthTrackers() { var healthMetadata = HealthMetadata.getFromClusterState(clusterService.state()); // Don't try to run the health trackers if the HealthMetadata is not available. if (healthMetadata == null) { return List.of(); } - return healthTrackers.stream().>map(HealthTracker::trackHealth) - // Only return changed values. - .filter(HealthTracker.HealthProgress::hasChanged) - .toList(); + return healthTrackers.stream().filter(HealthTracker::checkHealthChanged).toList(); + } + + private void releaseAndScheduleNextRun() { + inFlightRequest.set(false); + scheduleNextRunIfNecessary(); } private void scheduleNextRunIfNecessary() { diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java index a478130d83a78..f8aae2443e9fa 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java @@ -48,7 +48,7 @@ public DiskHealthTracker(NodeService nodeService, ClusterService clusterService) * @return the current disk health info. */ @Override - public DiskHealthInfo checkCurrentHealth() { + protected DiskHealthInfo determineCurrentHealth() { var clusterState = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(clusterState); DiscoveryNode node = clusterState.getNodes().getLocalNode(); @@ -92,7 +92,7 @@ public DiskHealthInfo checkCurrentHealth() { } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { builder.diskHealthInfo(healthInfo); } diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java index 2dd71a38f959e..8aec843e2bf1e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java @@ -8,31 +8,28 @@ package org.elasticsearch.health.node.tracker; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.health.node.LocalHealthMonitor; import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; -import java.util.Objects; -import java.util.concurrent.atomic.AtomicReference; - /** * Base class for health trackers that will be executed by the {@link LocalHealthMonitor}. It keeps track of the last - * reported value and can retrieve the current health status when requested. + * reported health and can retrieve the current health status when requested. * * @param the type of the health check result they track */ public abstract class HealthTracker { - private static final Logger logger = LogManager.getLogger(HealthTracker.class); - private final AtomicReference lastReportedValue = new AtomicReference<>(); + /** + * We can "simply" use a volatile field here, as we've ensured only one monitoring instance/thread at a time can update this value. + */ + private volatile T lastDeterminedHealth; /** * Determine the health info for this health check. * * @return the health info. 
*/ - public abstract T checkCurrentHealth(); + protected abstract T determineCurrentHealth(); /** * Add the health info to the request builder. @@ -40,28 +37,29 @@ public abstract class HealthTracker { * @param builder the builder to add the health info to. * @param healthInfo the health info to add. */ - public abstract void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, T healthInfo); + protected abstract void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, T healthInfo); /** - * Create a new {@link HealthProgress} instance by getting the current last reported value and determining the health info at this time. - * - * @return the new {@link HealthProgress} instance. + * Add the last reported health to the request builder. */ - public HealthProgress trackHealth() { - return new HealthProgress<>(this, lastReportedValue.get(), checkCurrentHealth()); + public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder) { + addToRequestBuilder(builder, lastDeterminedHealth); } /** - * Update the last reported health info to current, but only when the value inside lastReportedValue - * is equal to previous. + * Determine the current health info for this tracker and check if it has changed from the last reported value. When the health has + * changed, we'll store the new health as the last reported value. * - * @param previous the previous value that should be in lastReportedValue at the time of execution. - * @param current the value that should be stored in lastReportedValue. + * @return whether the health has changed. */ - public void updateLastReportedHealth(T previous, T current) { - if (lastReportedValue.compareAndSet(previous, current)) { - logger.debug("Health info [{}] successfully sent, last reported value: {}.", current, previous); + public boolean checkHealthChanged() { + var health = determineCurrentHealth(); + assert health != null : "health trackers must return unknown health instead of null"; + if (health.equals(lastDeterminedHealth)) { + return false; } + lastDeterminedHealth = health; + return true; } /** @@ -69,37 +67,10 @@ public void updateLastReportedHealth(T previous, T current) { * Should be used when, for example, the master or health node has changed. */ public void reset() { - lastReportedValue.set(null); + lastDeterminedHealth = null; } - public T getLastReportedValue() { - return lastReportedValue.get(); - } - - /** - * A record for storing the previous and current value of a health check. This allows us to be sure no concurrent processes have - * updated the health check's reference value. - * - * @param the type that the health tracker returns - */ - public record HealthProgress(HealthTracker healthTracker, T previousHealth, T currentHealth) { - public boolean hasChanged() { - return Objects.equals(previousHealth, currentHealth) == false; - } - - /** - * See {@link HealthTracker#addToRequestBuilder}. - */ - public void updateRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder) { - healthTracker.addToRequestBuilder(builder, currentHealth); - } - - /** - * Update the reference value of the health tracker with the current health info. - * See {@link HealthTracker#updateLastReportedHealth} for more info. 
- */ - public void recordProgressIfRelevant() { - healthTracker.updateLastReportedHealth(previousHealth, currentHealth); - } + public T getLastDeterminedHealth() { + return lastDeterminedHealth; } } diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java index cffc470045e0b..016df116300b8 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java @@ -33,7 +33,7 @@ public RepositoriesHealthTracker(RepositoriesService repositoriesService) { * @return the current repositories health on this node. */ @Override - public RepositoriesHealthInfo checkCurrentHealth() { + protected RepositoriesHealthInfo determineCurrentHealth() { var repositories = repositoriesService.getRepositories(); if (repositories.isEmpty()) { return new RepositoriesHealthInfo(List.of(), List.of()); @@ -52,7 +52,7 @@ public RepositoriesHealthInfo checkCurrentHealth() { } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, RepositoriesHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, RepositoriesHealthInfo healthInfo) { builder.repositoriesHealthInfo(healthInfo); } } diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index f9005f6e37889..3f95aff62313b 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -70,6 +70,7 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD; public abstract class AbstractHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { private static final Logger logger = LogManager.getLogger(AbstractHttpServerTransport.class); @@ -95,6 +96,7 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo private final RefCounted refCounted = AbstractRefCounted.of(() -> allClientsClosedListener.onResponse(null)); private final Set httpServerChannels = ConcurrentCollections.newConcurrentSet(); private final long shutdownGracePeriodMillis; + private final long shutdownPollPeriodMillis; private final HttpClientStatsTracker httpClientStatsTracker; private final HttpTracer httpLogger; @@ -146,6 +148,7 @@ protected AbstractHttpServerTransport( slowLogThresholdMs = TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING.get(settings).getMillis(); httpClientStatsTracker = new HttpClientStatsTracker(settings, clusterSettings, threadPool); shutdownGracePeriodMillis = SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD.get(settings).getMillis(); + shutdownPollPeriodMillis = SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD.get(settings).getMillis(); } public Recycler recycler() { @@ -272,17 +275,36 @@ protected void doStop() { boolean closed = false; + long pollTimeMillis = shutdownPollPeriodMillis; if (shutdownGracePeriodMillis > 0) { + if 
(shutdownGracePeriodMillis < pollTimeMillis) { + pollTimeMillis = shutdownGracePeriodMillis; + } + logger.debug(format("waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + } else { + logger.debug("waiting indefinitely for clients to close connections"); + } + + long startPollTimeMillis = System.currentTimeMillis(); + do { try { - logger.debug(format("waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); - FutureUtils.get(allClientsClosedListener, shutdownGracePeriodMillis, TimeUnit.MILLISECONDS); + FutureUtils.get(allClientsClosedListener, pollTimeMillis, TimeUnit.MILLISECONDS); closed = true; } catch (ElasticsearchTimeoutException t) { - logger.warn(format("timed out while waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + logger.info(format("still waiting on %d client connections to close", httpChannels.size())); + if (shutdownGracePeriodMillis > 0) { + long endPollTimeMillis = System.currentTimeMillis(); + long remainingGracePeriodMillis = shutdownGracePeriodMillis - (endPollTimeMillis - startPollTimeMillis); + if (remainingGracePeriodMillis <= 0) { + logger.warn(format("timed out while waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + break; + } else if (remainingGracePeriodMillis < pollTimeMillis) { + pollTimeMillis = remainingGracePeriodMillis; + } + } } - } else { - logger.debug("closing all client connections immediately"); - } + } while (closed == false); + if (closed == false) { try { CloseableChannel.closeChannels(new ArrayList<>(httpChannels.values()), true); diff --git a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 737a99d536919..dcceb43b63db8 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -132,6 +132,12 @@ public final class HttpTransportSettings { Setting.Property.NodeScope ); + public static final Setting SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD = Setting.positiveTimeSetting( + "http.shutdown_poll_period", + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope + ); + // don't reset cookies by default, since I don't think we really need to // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies public static final Setting SETTING_HTTP_RESET_COOKIES = Setting.boolSetting("http.reset_cookies", false, Property.NodeScope); diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/ForUtil.java similarity index 97% rename from server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java rename to server/src/main/java/org/elasticsearch/index/codec/ForUtil.java index 874b90a08b920..5687b0d1b687d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/ForUtil.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
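The new doStop() loop above waits for client connections in slices of the new http.shutdown_poll_period setting (five minutes by default) and only gives up once the configured grace period, if any, has been used up. The timing logic is easier to see in isolation; the sketch below is an illustration of that arithmetic under plain-Java assumptions, with a BooleanSupplier standing in for "all client connections are closed" and System.out standing in for the logger, not the transport code itself:

import java.util.function.BooleanSupplier;

class ShutdownWaitSketch {
    /** @return true if the clients closed on their own, false if the grace period ran out. */
    static boolean awaitClose(BooleanSupplier allClientsClosed, long gracePeriodMillis, long pollPeriodMillis)
        throws InterruptedException {
        long pollTimeMillis = pollPeriodMillis;
        if (gracePeriodMillis > 0 && gracePeriodMillis < pollTimeMillis) {
            pollTimeMillis = gracePeriodMillis; // never wait longer than the grace period allows
        }
        long start = System.currentTimeMillis();
        while (true) {
            if (waitUpTo(allClientsClosed, pollTimeMillis)) {
                return true;
            }
            // a poll slice expired: report progress, then decide whether any grace period is left
            System.out.println("still waiting on client connections to close");
            if (gracePeriodMillis > 0) {
                long remaining = gracePeriodMillis - (System.currentTimeMillis() - start);
                if (remaining <= 0) {
                    return false;                              // grace period exhausted: caller closes channels forcibly
                }
                pollTimeMillis = Math.min(pollTimeMillis, remaining);
            }
            // gracePeriodMillis == 0 means keep waiting indefinitely, one poll slice at a time
        }
    }

    private static boolean waitUpTo(BooleanSupplier condition, long millis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + millis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(50);
        }
        return condition.getAsBoolean();
    }
}

With a grace period of zero the loop never breaks out on its own, which matches the "waiting indefinitely for clients to close connections" branch in the diff.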
*/ -package org.elasticsearch.index.codec.tsdb; +package org.elasticsearch.index.codec; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; @@ -20,20 +20,9 @@ // else we pack 2 ints per long public final class ForUtil { - static final int DEFAULT_BLOCK_SIZE = 128; - private final int blockSize; - private final int blockSizeLog2; - private final long[] tmp; - - public ForUtil() { - this(DEFAULT_BLOCK_SIZE); - } - - private ForUtil(int blockSize) { - this.blockSize = blockSize; - this.blockSizeLog2 = (int) (Math.log(blockSize) / Math.log(2)); - this.tmp = new long[blockSize / 2]; - } + public static final int BLOCK_SIZE = 128; + private static final int BLOCK_SIZE_LOG2 = 7; + private final long[] tmp = new long[BLOCK_SIZE / 2]; private static long expandMask32(long mask32) { return mask32 | (mask32 << 32); @@ -129,20 +118,20 @@ private static void collapse32(long[] arr) { } /** Encode 128 integers from {@code longs} into {@code out}. */ - void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { + public void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { final int nextPrimitive; final int numLongs; if (bitsPerValue <= 8) { nextPrimitive = 8; - numLongs = blockSize / 8; + numLongs = BLOCK_SIZE / 8; collapse8(longs); } else if (bitsPerValue <= 16) { nextPrimitive = 16; - numLongs = blockSize / 4; + numLongs = BLOCK_SIZE / 4; collapse16(longs); } else { nextPrimitive = 32; - numLongs = blockSize / 2; + numLongs = BLOCK_SIZE / 2; collapse32(longs); } @@ -202,11 +191,11 @@ void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { } /** Number of bytes required to encode 128 integers of {@code bitsPerValue} bits per value. */ - int numBytes(int bitsPerValue) { - return bitsPerValue << (blockSizeLog2 - 3); + public int numBytes(int bitsPerValue) { + return bitsPerValue << (BLOCK_SIZE_LOG2 - 3); } - private static void decodeSlow(int blockSize, int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { + private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { final int numLongs = bitsPerValue << 1; in.readLongs(tmp, 0, numLongs); final long mask = MASKS32[bitsPerValue]; @@ -220,7 +209,7 @@ private static void decodeSlow(int blockSize, int bitsPerValue, DataInput in, lo final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong]; int tmpIdx = 0; int remainingBits = remainingBitsPerLong; - for (; longsIdx < blockSize / 2; ++longsIdx) { + for (; longsIdx < BLOCK_SIZE / 2; ++longsIdx) { int b = bitsPerValue - remainingBits; long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b; while (b >= remainingBitsPerLong) { @@ -310,7 +299,7 @@ private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, private static final long MASK32_24 = MASKS32[24]; /** Decode 128 integers into {@code longs}. 
*/ - void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { + public void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { switch (bitsPerValue) { case 1: decode1(in, tmp, longs); @@ -409,7 +398,7 @@ void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { expand32(longs); break; default: - decodeSlow(blockSize, bitsPerValue, in, tmp, longs); + decodeSlow(bitsPerValue, in, tmp, longs); expand32(longs); break; } @@ -421,7 +410,7 @@ void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { * [0..63], and values [64..127] are encoded in the low-order bits of {@code longs} [0..63]. This * representation may allow subsequent operations to be performed on two values at a time. */ - void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { + public void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { switch (bitsPerValue) { case 1: decode1(in, tmp, longs); @@ -512,7 +501,7 @@ void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException decode24(in, tmp, longs); break; default: - decodeSlow(blockSize, bitsPerValue, in, tmp, longs); + decodeSlow(bitsPerValue, in, tmp, longs); break; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java index 5270326876e08..6ccfaba7853f2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java @@ -36,6 +36,7 @@ import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java index 8b3d5d02a04c0..11bd90cd31610 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java @@ -36,11 +36,13 @@ import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; import java.io.IOException; import java.util.Arrays; +import static org.elasticsearch.index.codec.ForUtil.BLOCK_SIZE; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.DOC_CODEC; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.MAX_SKIP_LEVELS; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.PAY_CODEC; @@ -48,7 +50,6 @@ import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.TERMS_CODEC; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_CURRENT; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_START; -import static org.elasticsearch.index.codec.postings.ForUtil.BLOCK_SIZE; /** * Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format. 
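With ForUtil moved to org.elasticsearch.index.codec and its encode, decode and numBytes methods made public, the ES812 postings classes and the tsdb doc-values code can share one bit-packing helper instead of keeping private copies. A rough round-trip sketch, based only on the signatures visible above and on Lucene's in-memory ByteBuffersDataOutput/ByteBuffersDataInput; the surrounding plumbing is illustrative rather than how the codecs actually wire it up:

import org.apache.lucene.store.ByteBuffersDataInput;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.elasticsearch.index.codec.ForUtil;

import java.io.IOException;

class ForUtilSketch {
    static void roundTrip() throws IOException {
        ForUtil forUtil = new ForUtil();
        long[] values = new long[ForUtil.BLOCK_SIZE];   // always 128 values per block
        for (int i = 0; i < values.length; i++) {
            values[i] = i % 32;                          // every value fits in 5 bits
        }
        int bitsPerValue = 5;
        // numBytes(5) == 5 << (7 - 3) == 80 bytes, i.e. 128 values * 5 bits / 8
        int encodedBytes = forUtil.numBytes(bitsPerValue);

        ByteBuffersDataOutput out = new ByteBuffersDataOutput();
        forUtil.encode(values, bitsPerValue, out);       // note: encode() collapses its input array in place

        long[] decoded = new long[ForUtil.BLOCK_SIZE];
        ByteBuffersDataInput in = out.toDataInput();
        forUtil.decode(bitsPerValue, in, decoded);
        assert out.size() == encodedBytes;
    }
}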
diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java index 9ab7ed42efb09..cc95b4ffcfacf 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java @@ -35,6 +35,7 @@ import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java index 11c0c611312fc..f9b36114361ca 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java @@ -21,6 +21,7 @@ import org.apache.lucene.codecs.MultiLevelSkipListReader; import org.apache.lucene.store.IndexInput; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java deleted file mode 100644 index d874caab1b8c0..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java +++ /dev/null @@ -1,1049 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2022 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.postings; - -import org.apache.lucene.store.DataInput; -import org.apache.lucene.store.DataOutput; - -import java.io.IOException; - -// Inspired from https://fulmicoton.com/posts/bitpacking/ -// Encodes multiple integers in a long to get SIMD-like speedups. 
-// If bitsPerValue <= 8 then we pack 8 ints per long -// else if bitsPerValue <= 16 we pack 4 ints per long -// else we pack 2 ints per long -final class ForUtil { - - static final int BLOCK_SIZE = 128; - private static final int BLOCK_SIZE_LOG2 = 7; - - private static long expandMask32(long mask32) { - return mask32 | (mask32 << 32); - } - - private static long expandMask16(long mask16) { - return expandMask32(mask16 | (mask16 << 16)); - } - - private static long expandMask8(long mask8) { - return expandMask16(mask8 | (mask8 << 8)); - } - - private static long mask32(int bitsPerValue) { - return expandMask32((1L << bitsPerValue) - 1); - } - - private static long mask16(int bitsPerValue) { - return expandMask16((1L << bitsPerValue) - 1); - } - - private static long mask8(int bitsPerValue) { - return expandMask8((1L << bitsPerValue) - 1); - } - - private static void expand8(long[] arr) { - for (int i = 0; i < 16; ++i) { - long l = arr[i]; - arr[i] = (l >>> 56) & 0xFFL; - arr[16 + i] = (l >>> 48) & 0xFFL; - arr[32 + i] = (l >>> 40) & 0xFFL; - arr[48 + i] = (l >>> 32) & 0xFFL; - arr[64 + i] = (l >>> 24) & 0xFFL; - arr[80 + i] = (l >>> 16) & 0xFFL; - arr[96 + i] = (l >>> 8) & 0xFFL; - arr[112 + i] = l & 0xFFL; - } - } - - private static void expand8To32(long[] arr) { - for (int i = 0; i < 16; ++i) { - long l = arr[i]; - arr[i] = (l >>> 24) & 0x000000FF000000FFL; - arr[16 + i] = (l >>> 16) & 0x000000FF000000FFL; - arr[32 + i] = (l >>> 8) & 0x000000FF000000FFL; - arr[48 + i] = l & 0x000000FF000000FFL; - } - } - - private static void collapse8(long[] arr) { - for (int i = 0; i < 16; ++i) { - arr[i] = (arr[i] << 56) | (arr[16 + i] << 48) | (arr[32 + i] << 40) | (arr[48 + i] << 32) | (arr[64 + i] << 24) | (arr[80 + i] - << 16) | (arr[96 + i] << 8) | arr[112 + i]; - } - } - - private static void expand16(long[] arr) { - for (int i = 0; i < 32; ++i) { - long l = arr[i]; - arr[i] = (l >>> 48) & 0xFFFFL; - arr[32 + i] = (l >>> 32) & 0xFFFFL; - arr[64 + i] = (l >>> 16) & 0xFFFFL; - arr[96 + i] = l & 0xFFFFL; - } - } - - private static void expand16To32(long[] arr) { - for (int i = 0; i < 32; ++i) { - long l = arr[i]; - arr[i] = (l >>> 16) & 0x0000FFFF0000FFFFL; - arr[32 + i] = l & 0x0000FFFF0000FFFFL; - } - } - - private static void collapse16(long[] arr) { - for (int i = 0; i < 32; ++i) { - arr[i] = (arr[i] << 48) | (arr[32 + i] << 32) | (arr[64 + i] << 16) | arr[96 + i]; - } - } - - private static void expand32(long[] arr) { - for (int i = 0; i < 64; ++i) { - long l = arr[i]; - arr[i] = l >>> 32; - arr[64 + i] = l & 0xFFFFFFFFL; - } - } - - private static void collapse32(long[] arr) { - for (int i = 0; i < 64; ++i) { - arr[i] = (arr[i] << 32) | arr[64 + i]; - } - } - - private final long[] tmp = new long[BLOCK_SIZE / 2]; - - /** Encode 128 integers from {@code longs} into {@code out}. 
*/ - void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { - final int nextPrimitive; - final int numLongs; - if (bitsPerValue <= 8) { - nextPrimitive = 8; - numLongs = BLOCK_SIZE / 8; - collapse8(longs); - } else if (bitsPerValue <= 16) { - nextPrimitive = 16; - numLongs = BLOCK_SIZE / 4; - collapse16(longs); - } else { - nextPrimitive = 32; - numLongs = BLOCK_SIZE / 2; - collapse32(longs); - } - - final int numLongsPerShift = bitsPerValue * 2; - int idx = 0; - int shift = nextPrimitive - bitsPerValue; - for (int i = 0; i < numLongsPerShift; ++i) { - tmp[i] = longs[idx++] << shift; - } - for (shift = shift - bitsPerValue; shift >= 0; shift -= bitsPerValue) { - for (int i = 0; i < numLongsPerShift; ++i) { - tmp[i] |= longs[idx++] << shift; - } - } - - final int remainingBitsPerLong = shift + bitsPerValue; - final long maskRemainingBitsPerLong; - if (nextPrimitive == 8) { - maskRemainingBitsPerLong = MASKS8[remainingBitsPerLong]; - } else if (nextPrimitive == 16) { - maskRemainingBitsPerLong = MASKS16[remainingBitsPerLong]; - } else { - maskRemainingBitsPerLong = MASKS32[remainingBitsPerLong]; - } - - int tmpIdx = 0; - int remainingBitsPerValue = bitsPerValue; - while (idx < numLongs) { - if (remainingBitsPerValue >= remainingBitsPerLong) { - remainingBitsPerValue -= remainingBitsPerLong; - tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & maskRemainingBitsPerLong; - if (remainingBitsPerValue == 0) { - idx++; - remainingBitsPerValue = bitsPerValue; - } - } else { - final long mask1, mask2; - if (nextPrimitive == 8) { - mask1 = MASKS8[remainingBitsPerValue]; - mask2 = MASKS8[remainingBitsPerLong - remainingBitsPerValue]; - } else if (nextPrimitive == 16) { - mask1 = MASKS16[remainingBitsPerValue]; - mask2 = MASKS16[remainingBitsPerLong - remainingBitsPerValue]; - } else { - mask1 = MASKS32[remainingBitsPerValue]; - mask2 = MASKS32[remainingBitsPerLong - remainingBitsPerValue]; - } - tmp[tmpIdx] |= (longs[idx++] & mask1) << (remainingBitsPerLong - remainingBitsPerValue); - remainingBitsPerValue = bitsPerValue - remainingBitsPerLong + remainingBitsPerValue; - tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & mask2; - } - } - - for (int i = 0; i < numLongsPerShift; ++i) { - out.writeLong(tmp[i]); - } - } - - /** Number of bytes required to encode 128 integers of {@code bitsPerValue} bits per value. 
*/ - int numBytes(int bitsPerValue) { - return bitsPerValue << (BLOCK_SIZE_LOG2 - 3); - } - - private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { - final int numLongs = bitsPerValue << 1; - in.readLongs(tmp, 0, numLongs); - final long mask = MASKS32[bitsPerValue]; - int longsIdx = 0; - int shift = 32 - bitsPerValue; - for (; shift >= 0; shift -= bitsPerValue) { - shiftLongs(tmp, numLongs, longs, longsIdx, shift, mask); - longsIdx += numLongs; - } - final int remainingBitsPerLong = shift + bitsPerValue; - final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong]; - int tmpIdx = 0; - int remainingBits = remainingBitsPerLong; - for (; longsIdx < BLOCK_SIZE / 2; ++longsIdx) { - int b = bitsPerValue - remainingBits; - long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b; - while (b >= remainingBitsPerLong) { - b -= remainingBitsPerLong; - l |= (tmp[tmpIdx++] & mask32RemainingBitsPerLong) << b; - } - if (b > 0) { - l |= (tmp[tmpIdx] >>> (remainingBitsPerLong - b)) & MASKS32[b]; - remainingBits = remainingBitsPerLong - b; - } else { - remainingBits = remainingBitsPerLong; - } - longs[longsIdx] = l; - } - } - - /** - * The pattern that this shiftLongs method applies is recognized by the C2 compiler, which - * generates SIMD instructions for it in order to shift multiple longs at once. - */ - private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, long mask) { - for (int i = 0; i < count; ++i) { - b[bi + i] = (a[i] >>> shift) & mask; - } - } - - private static final long[] MASKS8 = new long[8]; - private static final long[] MASKS16 = new long[16]; - private static final long[] MASKS32 = new long[32]; - - static { - for (int i = 0; i < 8; ++i) { - MASKS8[i] = mask8(i); - } - for (int i = 0; i < 16; ++i) { - MASKS16[i] = mask16(i); - } - for (int i = 0; i < 32; ++i) { - MASKS32[i] = mask32(i); - } - } - - // mark values in array as final longs to avoid the cost of reading array, arrays should only be - // used when the idx is a variable - private static final long MASK8_1 = MASKS8[1]; - private static final long MASK8_2 = MASKS8[2]; - private static final long MASK8_3 = MASKS8[3]; - private static final long MASK8_4 = MASKS8[4]; - private static final long MASK8_5 = MASKS8[5]; - private static final long MASK8_6 = MASKS8[6]; - private static final long MASK8_7 = MASKS8[7]; - private static final long MASK16_1 = MASKS16[1]; - private static final long MASK16_2 = MASKS16[2]; - private static final long MASK16_3 = MASKS16[3]; - private static final long MASK16_4 = MASKS16[4]; - private static final long MASK16_5 = MASKS16[5]; - private static final long MASK16_6 = MASKS16[6]; - private static final long MASK16_7 = MASKS16[7]; - private static final long MASK16_9 = MASKS16[9]; - private static final long MASK16_10 = MASKS16[10]; - private static final long MASK16_11 = MASKS16[11]; - private static final long MASK16_12 = MASKS16[12]; - private static final long MASK16_13 = MASKS16[13]; - private static final long MASK16_14 = MASKS16[14]; - private static final long MASK16_15 = MASKS16[15]; - private static final long MASK32_1 = MASKS32[1]; - private static final long MASK32_2 = MASKS32[2]; - private static final long MASK32_3 = MASKS32[3]; - private static final long MASK32_4 = MASKS32[4]; - private static final long MASK32_5 = MASKS32[5]; - private static final long MASK32_6 = MASKS32[6]; - private static final long MASK32_7 = MASKS32[7]; - private static final long MASK32_8 = MASKS32[8]; - private static 
final long MASK32_9 = MASKS32[9]; - private static final long MASK32_10 = MASKS32[10]; - private static final long MASK32_11 = MASKS32[11]; - private static final long MASK32_12 = MASKS32[12]; - private static final long MASK32_13 = MASKS32[13]; - private static final long MASK32_14 = MASKS32[14]; - private static final long MASK32_15 = MASKS32[15]; - private static final long MASK32_17 = MASKS32[17]; - private static final long MASK32_18 = MASKS32[18]; - private static final long MASK32_19 = MASKS32[19]; - private static final long MASK32_20 = MASKS32[20]; - private static final long MASK32_21 = MASKS32[21]; - private static final long MASK32_22 = MASKS32[22]; - private static final long MASK32_23 = MASKS32[23]; - private static final long MASK32_24 = MASKS32[24]; - - /** Decode 128 integers into {@code longs}. */ - void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { - switch (bitsPerValue) { - case 1: - decode1(in, tmp, longs); - expand8(longs); - break; - case 2: - decode2(in, tmp, longs); - expand8(longs); - break; - case 3: - decode3(in, tmp, longs); - expand8(longs); - break; - case 4: - decode4(in, tmp, longs); - expand8(longs); - break; - case 5: - decode5(in, tmp, longs); - expand8(longs); - break; - case 6: - decode6(in, tmp, longs); - expand8(longs); - break; - case 7: - decode7(in, tmp, longs); - expand8(longs); - break; - case 8: - decode8(in, tmp, longs); - expand8(longs); - break; - case 9: - decode9(in, tmp, longs); - expand16(longs); - break; - case 10: - decode10(in, tmp, longs); - expand16(longs); - break; - case 11: - decode11(in, tmp, longs); - expand16(longs); - break; - case 12: - decode12(in, tmp, longs); - expand16(longs); - break; - case 13: - decode13(in, tmp, longs); - expand16(longs); - break; - case 14: - decode14(in, tmp, longs); - expand16(longs); - break; - case 15: - decode15(in, tmp, longs); - expand16(longs); - break; - case 16: - decode16(in, tmp, longs); - expand16(longs); - break; - case 17: - decode17(in, tmp, longs); - expand32(longs); - break; - case 18: - decode18(in, tmp, longs); - expand32(longs); - break; - case 19: - decode19(in, tmp, longs); - expand32(longs); - break; - case 20: - decode20(in, tmp, longs); - expand32(longs); - break; - case 21: - decode21(in, tmp, longs); - expand32(longs); - break; - case 22: - decode22(in, tmp, longs); - expand32(longs); - break; - case 23: - decode23(in, tmp, longs); - expand32(longs); - break; - case 24: - decode24(in, tmp, longs); - expand32(longs); - break; - default: - decodeSlow(bitsPerValue, in, tmp, longs); - expand32(longs); - break; - } - } - - /** - * Decodes 128 integers into 64 {@code longs} such that each long contains two values, each - * represented with 32 bits. Values [0..63] are encoded in the high-order bits of {@code longs} - * [0..63], and values [64..127] are encoded in the low-order bits of {@code longs} [0..63]. This - * representation may allow subsequent operations to be performed on two values at a time. 
- */ - void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { - switch (bitsPerValue) { - case 1: - decode1(in, tmp, longs); - expand8To32(longs); - break; - case 2: - decode2(in, tmp, longs); - expand8To32(longs); - break; - case 3: - decode3(in, tmp, longs); - expand8To32(longs); - break; - case 4: - decode4(in, tmp, longs); - expand8To32(longs); - break; - case 5: - decode5(in, tmp, longs); - expand8To32(longs); - break; - case 6: - decode6(in, tmp, longs); - expand8To32(longs); - break; - case 7: - decode7(in, tmp, longs); - expand8To32(longs); - break; - case 8: - decode8(in, tmp, longs); - expand8To32(longs); - break; - case 9: - decode9(in, tmp, longs); - expand16To32(longs); - break; - case 10: - decode10(in, tmp, longs); - expand16To32(longs); - break; - case 11: - decode11(in, tmp, longs); - expand16To32(longs); - break; - case 12: - decode12(in, tmp, longs); - expand16To32(longs); - break; - case 13: - decode13(in, tmp, longs); - expand16To32(longs); - break; - case 14: - decode14(in, tmp, longs); - expand16To32(longs); - break; - case 15: - decode15(in, tmp, longs); - expand16To32(longs); - break; - case 16: - decode16(in, tmp, longs); - expand16To32(longs); - break; - case 17: - decode17(in, tmp, longs); - break; - case 18: - decode18(in, tmp, longs); - break; - case 19: - decode19(in, tmp, longs); - break; - case 20: - decode20(in, tmp, longs); - break; - case 21: - decode21(in, tmp, longs); - break; - case 22: - decode22(in, tmp, longs); - break; - case 23: - decode23(in, tmp, longs); - break; - case 24: - decode24(in, tmp, longs); - break; - default: - decodeSlow(bitsPerValue, in, tmp, longs); - break; - } - } - - private static void decode1(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 2); - shiftLongs(tmp, 2, longs, 0, 7, MASK8_1); - shiftLongs(tmp, 2, longs, 2, 6, MASK8_1); - shiftLongs(tmp, 2, longs, 4, 5, MASK8_1); - shiftLongs(tmp, 2, longs, 6, 4, MASK8_1); - shiftLongs(tmp, 2, longs, 8, 3, MASK8_1); - shiftLongs(tmp, 2, longs, 10, 2, MASK8_1); - shiftLongs(tmp, 2, longs, 12, 1, MASK8_1); - shiftLongs(tmp, 2, longs, 14, 0, MASK8_1); - } - - private static void decode2(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 4); - shiftLongs(tmp, 4, longs, 0, 6, MASK8_2); - shiftLongs(tmp, 4, longs, 4, 4, MASK8_2); - shiftLongs(tmp, 4, longs, 8, 2, MASK8_2); - shiftLongs(tmp, 4, longs, 12, 0, MASK8_2); - } - - private static void decode3(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 6); - shiftLongs(tmp, 6, longs, 0, 5, MASK8_3); - shiftLongs(tmp, 6, longs, 6, 2, MASK8_3); - for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 2; ++iter, tmpIdx += 3, longsIdx += 2) { - long l0 = (tmp[tmpIdx + 0] & MASK8_2) << 1; - l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 2; - l1 |= (tmp[tmpIdx + 2] & MASK8_2) << 0; - longs[longsIdx + 1] = l1; - } - } - - private static void decode4(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 8); - shiftLongs(tmp, 8, longs, 0, 4, MASK8_4); - shiftLongs(tmp, 8, longs, 8, 0, MASK8_4); - } - - private static void decode5(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 10); - shiftLongs(tmp, 10, longs, 0, 3, MASK8_5); - for (int iter = 0, tmpIdx = 0, longsIdx = 10; iter < 2; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK8_3) << 2; - l0 |= (tmp[tmpIdx + 1] 
>>> 1) & MASK8_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 4; - l1 |= (tmp[tmpIdx + 2] & MASK8_3) << 1; - l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK8_1; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK8_2) << 3; - l2 |= (tmp[tmpIdx + 4] & MASK8_3) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode6(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 12); - shiftLongs(tmp, 12, longs, 0, 2, MASK8_6); - shiftLongs(tmp, 12, tmp, 0, 0, MASK8_2); - for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 4; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 4; - l0 |= tmp[tmpIdx + 1] << 2; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode7(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 14); - shiftLongs(tmp, 14, longs, 0, 1, MASK8_7); - shiftLongs(tmp, 14, tmp, 0, 0, MASK8_1); - for (int iter = 0, tmpIdx = 0, longsIdx = 14; iter < 2; ++iter, tmpIdx += 7, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 6; - l0 |= tmp[tmpIdx + 1] << 5; - l0 |= tmp[tmpIdx + 2] << 4; - l0 |= tmp[tmpIdx + 3] << 3; - l0 |= tmp[tmpIdx + 4] << 2; - l0 |= tmp[tmpIdx + 5] << 1; - l0 |= tmp[tmpIdx + 6] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode8(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(longs, 0, 16); - } - - private static void decode9(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 18); - shiftLongs(tmp, 18, longs, 0, 7, MASK16_9); - for (int iter = 0, tmpIdx = 0, longsIdx = 18; iter < 2; ++iter, tmpIdx += 9, longsIdx += 7) { - long l0 = (tmp[tmpIdx + 0] & MASK16_7) << 2; - l0 |= (tmp[tmpIdx + 1] >>> 5) & MASK16_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK16_5) << 4; - l1 |= (tmp[tmpIdx + 2] >>> 3) & MASK16_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK16_3) << 6; - l2 |= (tmp[tmpIdx + 3] >>> 1) & MASK16_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK16_1) << 8; - l3 |= (tmp[tmpIdx + 4] & MASK16_7) << 1; - l3 |= (tmp[tmpIdx + 5] >>> 6) & MASK16_1; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK16_6) << 3; - l4 |= (tmp[tmpIdx + 6] >>> 4) & MASK16_3; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 6] & MASK16_4) << 5; - l5 |= (tmp[tmpIdx + 7] >>> 2) & MASK16_5; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 7] & MASK16_2) << 7; - l6 |= (tmp[tmpIdx + 8] & MASK16_7) << 0; - longs[longsIdx + 6] = l6; - } - } - - private static void decode10(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 20); - shiftLongs(tmp, 20, longs, 0, 6, MASK16_10); - for (int iter = 0, tmpIdx = 0, longsIdx = 20; iter < 4; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK16_6) << 4; - l0 |= (tmp[tmpIdx + 1] >>> 2) & MASK16_4; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK16_2) << 8; - l1 |= (tmp[tmpIdx + 2] & MASK16_6) << 2; - l1 |= (tmp[tmpIdx + 3] >>> 4) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK16_4) << 6; - l2 |= (tmp[tmpIdx + 4] & MASK16_6) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode11(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 22); - shiftLongs(tmp, 22, longs, 0, 5, MASK16_11); - for (int iter = 0, tmpIdx = 0, longsIdx = 22; iter < 2; ++iter, tmpIdx += 11, longsIdx += 5) { - long l0 = (tmp[tmpIdx 
+ 0] & MASK16_5) << 6; - l0 |= (tmp[tmpIdx + 1] & MASK16_5) << 1; - l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK16_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK16_4) << 7; - l1 |= (tmp[tmpIdx + 3] & MASK16_5) << 2; - l1 |= (tmp[tmpIdx + 4] >>> 3) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 4] & MASK16_3) << 8; - l2 |= (tmp[tmpIdx + 5] & MASK16_5) << 3; - l2 |= (tmp[tmpIdx + 6] >>> 2) & MASK16_3; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 6] & MASK16_2) << 9; - l3 |= (tmp[tmpIdx + 7] & MASK16_5) << 4; - l3 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_4; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 8] & MASK16_1) << 10; - l4 |= (tmp[tmpIdx + 9] & MASK16_5) << 5; - l4 |= (tmp[tmpIdx + 10] & MASK16_5) << 0; - longs[longsIdx + 4] = l4; - } - } - - private static void decode12(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 24); - shiftLongs(tmp, 24, longs, 0, 4, MASK16_12); - shiftLongs(tmp, 24, tmp, 0, 0, MASK16_4); - for (int iter = 0, tmpIdx = 0, longsIdx = 24; iter < 8; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 8; - l0 |= tmp[tmpIdx + 1] << 4; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode13(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 26); - shiftLongs(tmp, 26, longs, 0, 3, MASK16_13); - for (int iter = 0, tmpIdx = 0, longsIdx = 26; iter < 2; ++iter, tmpIdx += 13, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK16_3) << 10; - l0 |= (tmp[tmpIdx + 1] & MASK16_3) << 7; - l0 |= (tmp[tmpIdx + 2] & MASK16_3) << 4; - l0 |= (tmp[tmpIdx + 3] & MASK16_3) << 1; - l0 |= (tmp[tmpIdx + 4] >>> 2) & MASK16_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 4] & MASK16_2) << 11; - l1 |= (tmp[tmpIdx + 5] & MASK16_3) << 8; - l1 |= (tmp[tmpIdx + 6] & MASK16_3) << 5; - l1 |= (tmp[tmpIdx + 7] & MASK16_3) << 2; - l1 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 8] & MASK16_1) << 12; - l2 |= (tmp[tmpIdx + 9] & MASK16_3) << 9; - l2 |= (tmp[tmpIdx + 10] & MASK16_3) << 6; - l2 |= (tmp[tmpIdx + 11] & MASK16_3) << 3; - l2 |= (tmp[tmpIdx + 12] & MASK16_3) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode14(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 28); - shiftLongs(tmp, 28, longs, 0, 2, MASK16_14); - shiftLongs(tmp, 28, tmp, 0, 0, MASK16_2); - for (int iter = 0, tmpIdx = 0, longsIdx = 28; iter < 4; ++iter, tmpIdx += 7, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 12; - l0 |= tmp[tmpIdx + 1] << 10; - l0 |= tmp[tmpIdx + 2] << 8; - l0 |= tmp[tmpIdx + 3] << 6; - l0 |= tmp[tmpIdx + 4] << 4; - l0 |= tmp[tmpIdx + 5] << 2; - l0 |= tmp[tmpIdx + 6] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode15(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 30); - shiftLongs(tmp, 30, longs, 0, 1, MASK16_15); - shiftLongs(tmp, 30, tmp, 0, 0, MASK16_1); - for (int iter = 0, tmpIdx = 0, longsIdx = 30; iter < 2; ++iter, tmpIdx += 15, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 14; - l0 |= tmp[tmpIdx + 1] << 13; - l0 |= tmp[tmpIdx + 2] << 12; - l0 |= tmp[tmpIdx + 3] << 11; - l0 |= tmp[tmpIdx + 4] << 10; - l0 |= tmp[tmpIdx + 5] << 9; - l0 |= tmp[tmpIdx + 6] << 8; - l0 |= tmp[tmpIdx + 7] << 7; - l0 |= tmp[tmpIdx + 8] << 6; - l0 |= tmp[tmpIdx + 9] << 5; - l0 |= tmp[tmpIdx + 10] << 4; - l0 |= tmp[tmpIdx + 11] << 3; - l0 |= tmp[tmpIdx + 12] << 2; - l0 |= tmp[tmpIdx + 13] 
<< 1; - l0 |= tmp[tmpIdx + 14] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode16(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(longs, 0, 32); - } - - private static void decode17(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 34); - shiftLongs(tmp, 34, longs, 0, 15, MASK32_17); - for (int iter = 0, tmpIdx = 0, longsIdx = 34; iter < 2; ++iter, tmpIdx += 17, longsIdx += 15) { - long l0 = (tmp[tmpIdx + 0] & MASK32_15) << 2; - l0 |= (tmp[tmpIdx + 1] >>> 13) & MASK32_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_13) << 4; - l1 |= (tmp[tmpIdx + 2] >>> 11) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_11) << 6; - l2 |= (tmp[tmpIdx + 3] >>> 9) & MASK32_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK32_9) << 8; - l3 |= (tmp[tmpIdx + 4] >>> 7) & MASK32_8; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 4] & MASK32_7) << 10; - l4 |= (tmp[tmpIdx + 5] >>> 5) & MASK32_10; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 5] & MASK32_5) << 12; - l5 |= (tmp[tmpIdx + 6] >>> 3) & MASK32_12; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 6] & MASK32_3) << 14; - l6 |= (tmp[tmpIdx + 7] >>> 1) & MASK32_14; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 7] & MASK32_1) << 16; - l7 |= (tmp[tmpIdx + 8] & MASK32_15) << 1; - l7 |= (tmp[tmpIdx + 9] >>> 14) & MASK32_1; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 9] & MASK32_14) << 3; - l8 |= (tmp[tmpIdx + 10] >>> 12) & MASK32_3; - longs[longsIdx + 8] = l8; - long l9 = (tmp[tmpIdx + 10] & MASK32_12) << 5; - l9 |= (tmp[tmpIdx + 11] >>> 10) & MASK32_5; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 11] & MASK32_10) << 7; - l10 |= (tmp[tmpIdx + 12] >>> 8) & MASK32_7; - longs[longsIdx + 10] = l10; - long l11 = (tmp[tmpIdx + 12] & MASK32_8) << 9; - l11 |= (tmp[tmpIdx + 13] >>> 6) & MASK32_9; - longs[longsIdx + 11] = l11; - long l12 = (tmp[tmpIdx + 13] & MASK32_6) << 11; - l12 |= (tmp[tmpIdx + 14] >>> 4) & MASK32_11; - longs[longsIdx + 12] = l12; - long l13 = (tmp[tmpIdx + 14] & MASK32_4) << 13; - l13 |= (tmp[tmpIdx + 15] >>> 2) & MASK32_13; - longs[longsIdx + 13] = l13; - long l14 = (tmp[tmpIdx + 15] & MASK32_2) << 15; - l14 |= (tmp[tmpIdx + 16] & MASK32_15) << 0; - longs[longsIdx + 14] = l14; - } - } - - private static void decode18(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 36); - shiftLongs(tmp, 36, longs, 0, 14, MASK32_18); - for (int iter = 0, tmpIdx = 0, longsIdx = 36; iter < 4; ++iter, tmpIdx += 9, longsIdx += 7) { - long l0 = (tmp[tmpIdx + 0] & MASK32_14) << 4; - l0 |= (tmp[tmpIdx + 1] >>> 10) & MASK32_4; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_10) << 8; - l1 |= (tmp[tmpIdx + 2] >>> 6) & MASK32_8; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_6) << 12; - l2 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_12; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK32_2) << 16; - l3 |= (tmp[tmpIdx + 4] & MASK32_14) << 2; - l3 |= (tmp[tmpIdx + 5] >>> 12) & MASK32_2; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK32_12) << 6; - l4 |= (tmp[tmpIdx + 6] >>> 8) & MASK32_6; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 6] & MASK32_8) << 10; - l5 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_10; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 7] & MASK32_4) << 14; - l6 |= (tmp[tmpIdx + 8] & MASK32_14) << 0; - longs[longsIdx + 6] = l6; - } - } - - private static void 
decode19(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 38); - shiftLongs(tmp, 38, longs, 0, 13, MASK32_19); - for (int iter = 0, tmpIdx = 0, longsIdx = 38; iter < 2; ++iter, tmpIdx += 19, longsIdx += 13) { - long l0 = (tmp[tmpIdx + 0] & MASK32_13) << 6; - l0 |= (tmp[tmpIdx + 1] >>> 7) & MASK32_6; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_7) << 12; - l1 |= (tmp[tmpIdx + 2] >>> 1) & MASK32_12; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_1) << 18; - l2 |= (tmp[tmpIdx + 3] & MASK32_13) << 5; - l2 |= (tmp[tmpIdx + 4] >>> 8) & MASK32_5; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 4] & MASK32_8) << 11; - l3 |= (tmp[tmpIdx + 5] >>> 2) & MASK32_11; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK32_2) << 17; - l4 |= (tmp[tmpIdx + 6] & MASK32_13) << 4; - l4 |= (tmp[tmpIdx + 7] >>> 9) & MASK32_4; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 7] & MASK32_9) << 10; - l5 |= (tmp[tmpIdx + 8] >>> 3) & MASK32_10; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 8] & MASK32_3) << 16; - l6 |= (tmp[tmpIdx + 9] & MASK32_13) << 3; - l6 |= (tmp[tmpIdx + 10] >>> 10) & MASK32_3; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 10] & MASK32_10) << 9; - l7 |= (tmp[tmpIdx + 11] >>> 4) & MASK32_9; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 11] & MASK32_4) << 15; - l8 |= (tmp[tmpIdx + 12] & MASK32_13) << 2; - l8 |= (tmp[tmpIdx + 13] >>> 11) & MASK32_2; - longs[longsIdx + 8] = l8; - long l9 = (tmp[tmpIdx + 13] & MASK32_11) << 8; - l9 |= (tmp[tmpIdx + 14] >>> 5) & MASK32_8; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 14] & MASK32_5) << 14; - l10 |= (tmp[tmpIdx + 15] & MASK32_13) << 1; - l10 |= (tmp[tmpIdx + 16] >>> 12) & MASK32_1; - longs[longsIdx + 10] = l10; - long l11 = (tmp[tmpIdx + 16] & MASK32_12) << 7; - l11 |= (tmp[tmpIdx + 17] >>> 6) & MASK32_7; - longs[longsIdx + 11] = l11; - long l12 = (tmp[tmpIdx + 17] & MASK32_6) << 13; - l12 |= (tmp[tmpIdx + 18] & MASK32_13) << 0; - longs[longsIdx + 12] = l12; - } - } - - private static void decode20(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 40); - shiftLongs(tmp, 40, longs, 0, 12, MASK32_20); - for (int iter = 0, tmpIdx = 0, longsIdx = 40; iter < 8; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK32_12) << 8; - l0 |= (tmp[tmpIdx + 1] >>> 4) & MASK32_8; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_4) << 16; - l1 |= (tmp[tmpIdx + 2] & MASK32_12) << 4; - l1 |= (tmp[tmpIdx + 3] >>> 8) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK32_8) << 12; - l2 |= (tmp[tmpIdx + 4] & MASK32_12) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode21(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 42); - shiftLongs(tmp, 42, longs, 0, 11, MASK32_21); - for (int iter = 0, tmpIdx = 0, longsIdx = 42; iter < 2; ++iter, tmpIdx += 21, longsIdx += 11) { - long l0 = (tmp[tmpIdx + 0] & MASK32_11) << 10; - l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK32_10; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_1) << 20; - l1 |= (tmp[tmpIdx + 2] & MASK32_11) << 9; - l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_9; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK32_2) << 19; - l2 |= (tmp[tmpIdx + 4] & MASK32_11) << 8; - l2 |= (tmp[tmpIdx + 5] >>> 3) & MASK32_8; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 5] & MASK32_3) << 18; - l3 |= (tmp[tmpIdx + 6] & MASK32_11) << 7; 
- l3 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_7; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 7] & MASK32_4) << 17; - l4 |= (tmp[tmpIdx + 8] & MASK32_11) << 6; - l4 |= (tmp[tmpIdx + 9] >>> 5) & MASK32_6; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 9] & MASK32_5) << 16; - l5 |= (tmp[tmpIdx + 10] & MASK32_11) << 5; - l5 |= (tmp[tmpIdx + 11] >>> 6) & MASK32_5; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 11] & MASK32_6) << 15; - l6 |= (tmp[tmpIdx + 12] & MASK32_11) << 4; - l6 |= (tmp[tmpIdx + 13] >>> 7) & MASK32_4; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 13] & MASK32_7) << 14; - l7 |= (tmp[tmpIdx + 14] & MASK32_11) << 3; - l7 |= (tmp[tmpIdx + 15] >>> 8) & MASK32_3; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 15] & MASK32_8) << 13; - l8 |= (tmp[tmpIdx + 16] & MASK32_11) << 2; - l8 |= (tmp[tmpIdx + 17] >>> 9) & MASK32_2; - longs[longsIdx + 8] = l8; - long l9 = (tmp[tmpIdx + 17] & MASK32_9) << 12; - l9 |= (tmp[tmpIdx + 18] & MASK32_11) << 1; - l9 |= (tmp[tmpIdx + 19] >>> 10) & MASK32_1; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 19] & MASK32_10) << 11; - l10 |= (tmp[tmpIdx + 20] & MASK32_11) << 0; - longs[longsIdx + 10] = l10; - } - } - - private static void decode22(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 44); - shiftLongs(tmp, 44, longs, 0, 10, MASK32_22); - for (int iter = 0, tmpIdx = 0, longsIdx = 44; iter < 4; ++iter, tmpIdx += 11, longsIdx += 5) { - long l0 = (tmp[tmpIdx + 0] & MASK32_10) << 12; - l0 |= (tmp[tmpIdx + 1] & MASK32_10) << 2; - l0 |= (tmp[tmpIdx + 2] >>> 8) & MASK32_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK32_8) << 14; - l1 |= (tmp[tmpIdx + 3] & MASK32_10) << 4; - l1 |= (tmp[tmpIdx + 4] >>> 6) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 4] & MASK32_6) << 16; - l2 |= (tmp[tmpIdx + 5] & MASK32_10) << 6; - l2 |= (tmp[tmpIdx + 6] >>> 4) & MASK32_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 6] & MASK32_4) << 18; - l3 |= (tmp[tmpIdx + 7] & MASK32_10) << 8; - l3 |= (tmp[tmpIdx + 8] >>> 2) & MASK32_8; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 8] & MASK32_2) << 20; - l4 |= (tmp[tmpIdx + 9] & MASK32_10) << 10; - l4 |= (tmp[tmpIdx + 10] & MASK32_10) << 0; - longs[longsIdx + 4] = l4; - } - } - - private static void decode23(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 46); - shiftLongs(tmp, 46, longs, 0, 9, MASK32_23); - for (int iter = 0, tmpIdx = 0, longsIdx = 46; iter < 2; ++iter, tmpIdx += 23, longsIdx += 9) { - long l0 = (tmp[tmpIdx + 0] & MASK32_9) << 14; - l0 |= (tmp[tmpIdx + 1] & MASK32_9) << 5; - l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK32_5; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK32_4) << 19; - l1 |= (tmp[tmpIdx + 3] & MASK32_9) << 10; - l1 |= (tmp[tmpIdx + 4] & MASK32_9) << 1; - l1 |= (tmp[tmpIdx + 5] >>> 8) & MASK32_1; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 5] & MASK32_8) << 15; - l2 |= (tmp[tmpIdx + 6] & MASK32_9) << 6; - l2 |= (tmp[tmpIdx + 7] >>> 3) & MASK32_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 7] & MASK32_3) << 20; - l3 |= (tmp[tmpIdx + 8] & MASK32_9) << 11; - l3 |= (tmp[tmpIdx + 9] & MASK32_9) << 2; - l3 |= (tmp[tmpIdx + 10] >>> 7) & MASK32_2; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 10] & MASK32_7) << 16; - l4 |= (tmp[tmpIdx + 11] & MASK32_9) << 7; - l4 |= (tmp[tmpIdx + 12] >>> 2) & MASK32_7; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 12] & MASK32_2) << 21; - l5 |= (tmp[tmpIdx + 13] & 
MASK32_9) << 12; - l5 |= (tmp[tmpIdx + 14] & MASK32_9) << 3; - l5 |= (tmp[tmpIdx + 15] >>> 6) & MASK32_3; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 15] & MASK32_6) << 17; - l6 |= (tmp[tmpIdx + 16] & MASK32_9) << 8; - l6 |= (tmp[tmpIdx + 17] >>> 1) & MASK32_8; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 17] & MASK32_1) << 22; - l7 |= (tmp[tmpIdx + 18] & MASK32_9) << 13; - l7 |= (tmp[tmpIdx + 19] & MASK32_9) << 4; - l7 |= (tmp[tmpIdx + 20] >>> 5) & MASK32_4; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 20] & MASK32_5) << 18; - l8 |= (tmp[tmpIdx + 21] & MASK32_9) << 9; - l8 |= (tmp[tmpIdx + 22] & MASK32_9) << 0; - longs[longsIdx + 8] = l8; - } - } - - private static void decode24(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 48); - shiftLongs(tmp, 48, longs, 0, 8, MASK32_24); - shiftLongs(tmp, 48, tmp, 0, 0, MASK32_8); - for (int iter = 0, tmpIdx = 0, longsIdx = 48; iter < 16; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 16; - l0 |= tmp[tmpIdx + 1] << 8; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java index 26a600c73eeb5..46ab0b0d00671 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java @@ -23,6 +23,7 @@ import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.LongHeap; import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java index 873dcc9b87207..303c66309a23f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java @@ -11,6 +11,7 @@ import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java index e9bf5838be8b3..cf453bd1571be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java @@ -208,7 +208,7 @@ protected final LeafFactory leafFactory(SearchExecutionContext context) { public void validateMatchedRoutingPath(final String routingPath) { throw new IllegalArgumentException( "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " + + "must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] " + "and without the [script] parameter. 
[" + name() diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index cc01a487ad7b8..968c48abc54d8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -48,6 +48,7 @@ import java.time.ZoneId; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -317,8 +318,9 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { + Set dedupe = new HashSet<>(values); BooleanQuery.Builder builder = new BooleanQuery.Builder(); - for (Object value : values) { + for (Object value : dedupe) { builder.add(termQuery(value, context), BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index f7dc09cdbb370..9b3496acfd9f3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -125,7 +125,7 @@ public void validate(IndexSettings settings, boolean checkLimits) { // object type is not allowed in the routing paths if (path.equals(objectName)) { throw new IllegalArgumentException( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] " + "and without the [script] parameter. 
[" + objectName diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java index 8505c561bfb1a..799042b4f3a87 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java @@ -333,7 +333,11 @@ public boolean newDynamicStringField(DocumentParserContext context, String name) ); } else { return createDynamicField( - new TextFieldMapper.Builder(name, context.indexAnalyzers()).addMultiField( + new TextFieldMapper.Builder( + name, + context.indexAnalyzers(), + context.indexSettings().getMode().isSyntheticSourceEnabled() + ).addMultiField( new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) ), context diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index f9354025cab49..63d23462e4847 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -450,13 +450,28 @@ public static class Builder { private final Map> mapperBuilders = new HashMap<>(); + private boolean hasSyntheticSourceCompatibleKeywordField; + public Builder add(FieldMapper.Builder builder) { mapperBuilders.put(builder.name(), builder::build); + + if (builder instanceof KeywordFieldMapper.Builder kwd) { + if (kwd.hasNormalizer() == false && (kwd.hasDocValues() || kwd.isStored())) { + hasSyntheticSourceCompatibleKeywordField = true; + } + } + return this; } private void add(FieldMapper mapper) { mapperBuilders.put(mapper.simpleName(), context -> mapper); + + if (mapper instanceof KeywordFieldMapper kwd) { + if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { + hasSyntheticSourceCompatibleKeywordField = true; + } + } } private void update(FieldMapper toMerge, MapperMergeContext context) { @@ -474,6 +489,10 @@ public boolean hasMultiFields() { return mapperBuilders.isEmpty() == false; } + public boolean hasSyntheticSourceCompatibleKeywordField() { + return hasSyntheticSourceCompatibleKeywordField; + } + public MultiFields build(Mapper.Builder mainFieldBuilder, MapperBuilderContext context) { if (mapperBuilders.isEmpty()) { return empty(); @@ -1134,6 +1153,10 @@ public static Parameter storeParam(Function initi return Parameter.boolParam("store", false, initializer, defaultValue); } + public static Parameter storeParam(Function initializer, Supplier defaultValue) { + return Parameter.boolParam("store", false, initializer, defaultValue); + } + public static Parameter docValuesParam(Function initializer, boolean defaultValue) { return Parameter.boolParam("doc_values", false, initializer, defaultValue); } @@ -1419,6 +1442,26 @@ private static boolean isDeprecatedParameter(String propName, IndexVersion index } } + /** + * Creates mappers for fields that can act as time-series dimensions. 
+ */ + public abstract static class DimensionBuilder extends Builder { + + private boolean inheritDimensionParameterFromParentObject = false; + + public DimensionBuilder(String name) { + super(name); + } + + void setInheritDimensionParameterFromParentObject() { + this.inheritDimensionParameterFromParentObject = true; + } + + protected boolean inheritDimensionParameterFromParentObject(MapperBuilderContext context) { + return inheritDimensionParameterFromParentObject || context.parentObjectContainsDimensions(); + } + } + public static BiConsumer notInMultiFields(String type) { return (n, c) -> { if (c.isWithinMultiField()) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java index ef15af93f6e34..b8e52667894bb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java @@ -93,6 +93,9 @@ public IdLoader.Leaf leaf(LeafStoredFieldLoader loader, LeafReader reader, int[] // Each document always has exactly one tsid and one timestamp: SortedDocValues tsIdDocValues = DocValues.getSorted(reader, TimeSeriesIdFieldMapper.NAME); SortedNumericDocValues timestampDocValues = DocValues.getSortedNumeric(reader, DataStream.TIMESTAMP_FIELD_NAME); + SortedDocValues routingHashDocValues = builders == null + ? DocValues.getSorted(reader, TimeSeriesRoutingHashFieldMapper.NAME) + : null; for (int i = 0; i < docIdsInLeaf.length; i++) { int docId = docIdsInLeaf[i]; @@ -107,11 +110,12 @@ public IdLoader.Leaf leaf(LeafStoredFieldLoader loader, LeafReader reader, int[] var routingBuilder = builders[i]; ids[i] = TsidExtractingIdFieldMapper.createId(false, routingBuilder, tsid, timestamp, new byte[16]); } else { - SortedDocValues routingHashDocValues = DocValues.getSorted(reader, TimeSeriesRoutingHashFieldMapper.NAME); found = routingHashDocValues.advanceExact(docId); assert found; BytesRef routingHashBytes = routingHashDocValues.lookupOrd(routingHashDocValues.ordValue()); - int routingHash = TimeSeriesRoutingHashFieldMapper.decode(Uid.decodeId(routingHashBytes.bytes)); + int routingHash = TimeSeriesRoutingHashFieldMapper.decode( + Uid.decodeId(routingHashBytes.bytes, routingHashBytes.offset, routingHashBytes.length) + ); ids[i] = TsidExtractingIdFieldMapper.createId(routingHash, tsid, timestamp); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 355b38d4dcb96..2e0fc68770045 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -69,7 +69,7 @@ private static IpFieldMapper toType(FieldMapper in) { return (IpFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -166,7 +166,7 @@ protected Parameter[] getParameters() { @Override public IpFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } return new IpFieldMapper( @@ -246,6 +246,16 @@ public boolean mayExistInIndex(SearchExecutionContext context) { 
return context.fieldExistsInIndex(name()); } + @Override + public boolean isDimension() { + return isDimension; + } + + @Override + public boolean hasScriptValues() { + return scriptValues != null; + } + private static InetAddress parse(Object value) { if (value instanceof InetAddress) { return (InetAddress) value; @@ -461,13 +471,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi } return terms.intersect(prefixAutomaton, searchBytes); } - - /** - * @return true if field has been marked as a dimension field - */ - public boolean isDimension() { - return isDimension; - } } private final boolean indexed; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 06e689784b087..bdf25307d3343 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -137,7 +137,7 @@ private static KeywordFieldMapper toType(FieldMapper in) { return (KeywordFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -227,6 +227,10 @@ Builder normalizer(String normalizerName) { return this; } + public boolean hasNormalizer() { + return this.normalizer.get() != null; + } + Builder nullValue(String nullValue) { this.nullValue.setValue(nullValue); return this; @@ -237,6 +241,10 @@ public Builder docValues(boolean hasDocValues) { return this; } + public boolean hasDocValues() { + return this.hasDocValues.get(); + } + public Builder dimension(boolean dimension) { this.dimension.setValue(dimension); return this; @@ -247,6 +255,15 @@ public Builder indexed(boolean indexed) { return this; } + public Builder stored(boolean stored) { + this.stored.setValue(stored); + return this; + } + + public boolean isStored() { + return this.stored.get(); + } + private FieldValues scriptValues() { if (script.get() == null) { return null; @@ -304,7 +321,7 @@ private KeywordFieldType buildFieldType(MapperBuilderContext context, FieldType } else if (splitQueriesOnWhitespace.getValue()) { searchAnalyzer = Lucene.WHITESPACE_ANALYZER; } - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension(true); } return new KeywordFieldType( @@ -811,35 +828,14 @@ public int ignoreAbove() { return ignoreAbove; } - /** - * @return true if field has been marked as a dimension field - */ @Override public boolean isDimension() { return isDimension; } @Override - public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was not a dimension." - ); - } - if (scriptValues != null) { - throw new IllegalArgumentException( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. 
[" - + name() - + "] has a [script] parameter." - ); - } + public boolean hasScriptValues() { + return scriptValues != null; } public boolean hasNormalizer() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 265374a687312..a554e6e44a8e8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -49,6 +49,7 @@ import java.time.ZoneId; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -195,6 +196,13 @@ public boolean isDimension() { return false; } + /** + * @return true if field has script values. + */ + public boolean hasScriptValues() { + return false; + } + /** * @return a list of dimension fields. Expected to be used by fields that have * nested fields or that, in some way, identify a collection of fields by means @@ -234,8 +242,9 @@ public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionCon * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link Occur#SHOULD} clauses * are generated with {@link #termQuery}. */ public Query termsQuery(Collection values, @Nullable SearchExecutionContext context) { + Set dedupe = new HashSet<>(values); BooleanQuery.Builder builder = new BooleanQuery.Builder(); - for (Object value : values) { + for (Object value : dedupe) { builder.add(termQuery(value, context), Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); @@ -621,16 +630,26 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi * Validate that this field can be the target of {@link IndexMetadata#INDEX_ROUTING_PATH}. */ public void validateMatchedRoutingPath(String routingPath) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was [" - + typeName() - + "]." - ); + if (hasScriptValues()) { + throw new IllegalArgumentException( + "All fields that match routing_path must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + name() + + "] has a [script] parameter." + ); + } + + if (isDimension() == false) { + throw new IllegalArgumentException( + "All fields that match routing_path " + + "must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + name() + + "] was not a dimension." 
+ ); + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java index dcf24c9a61bbd..aa2a7ce2f3996 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java @@ -10,13 +10,13 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; import java.util.function.Function; -import java.util.function.Predicate; /** * A registry for all field mappers. @@ -29,13 +29,13 @@ public final class MapperRegistry { private final Map metadataMapperParsers7x; private final Map metadataMapperParsers6x; private final Map metadataMapperParsers5x; - private final Function> fieldFilter; + private final Function fieldFilter; public MapperRegistry( Map mapperParsers, Map runtimeFieldParsers, Map metadataMapperParsers, - Function> fieldFilter + Function fieldFilter ) { this.mapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(mapperParsers)); this.runtimeFieldParsers = runtimeFieldParsers; @@ -92,7 +92,7 @@ public Map getMetadataMapperParsers(Inde * {@link MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be returned by get mappings, * get index, get field mappings and field capabilities API. */ - public Function> getFieldFilter() { + public Function getFieldFilter() { return fieldFilter; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 2245e527c2aa2..1f7a3bf2106ae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -70,7 +70,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -89,7 +88,7 @@ private static NumberFieldMapper toType(FieldMapper in) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -183,11 +182,6 @@ public Builder( } }); this.dimension = TimeSeriesParams.dimensionParam(m -> toType(m).dimension).addValidator(v -> { - if (v && EnumSet.of(NumberType.INTEGER, NumberType.LONG, NumberType.BYTE, NumberType.SHORT).contains(type) == false) { - throw new IllegalArgumentException( - "Parameter [" + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + "] cannot be set to numeric type [" + type.name + "]" - ); - } if (v && (indexed.getValue() == false || hasDocValues.getValue() == false)) { throw new IllegalArgumentException( "Field [" @@ -267,7 +261,7 @@ protected Parameter[] getParameters() { @Override public NumberFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } @@ -281,6 +275,7 @@ public enum NumberType { @Override 
public Float parse(Object value, boolean coerce) { final float result = parseToFloat(value); + validateFiniteValue(result); // Reduce the precision to what we actually index return HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(result)); } @@ -292,9 +287,9 @@ public double reduceToStoredPrecision(double value) { /** * Parse a query parameter or {@code _source} value to a float, - * keeping float precision. Used by queries which need more - * precise control over their rounding behavior that - * {@link #parse(Object, boolean)} provides. + * keeping float precision. Used by queries which do need to validate + * against infinite values, but need more precise control over their + * rounding behavior than {@link #parse(Object, boolean)} provides. */ private static float parseToFloat(Object value) { final float result; @@ -307,7 +302,6 @@ private static float parseToFloat(Object value) { } result = Float.parseFloat(value.toString()); } - validateParsed(result); return result; } @@ -319,13 +313,14 @@ public Number parsePoint(byte[] value) { @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); - validateParsed(parsed); + validateFiniteValue(parsed); return parsed; } @Override public Query termQuery(String field, Object value, boolean isIndexed) { float v = parseToFloat(value); + validateFiniteValue(v); if (isIndexed) { return HalfFloatPoint.newExactQuery(field, v); } else { @@ -338,7 +333,9 @@ public Query termsQuery(String field, Collection values) { float[] v = new float[values.size()]; int pos = 0; for (Object value : values) { - v[pos++] = parseToFloat(value); + float float_value = parseToFloat(value); + validateFiniteValue(float_value); + v[pos++] = float_value; } return HalfFloatPoint.newSetQuery(field, v); } @@ -426,7 +423,7 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( ); } - private static void validateParsed(float value) { + private static void validateFiniteValue(float value) { if (Float.isFinite(HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value))) == false) { throw new IllegalArgumentException("[half_float] supports only finite values, but got [" + value + "]"); } @@ -455,6 +452,17 @@ BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSo FLOAT("float", NumericType.FLOAT) { @Override public Float parse(Object value, boolean coerce) { + final float result = parseToFloat(value); + validateFiniteValue(result); + return result; + } + + /** + * Parse a query parameter or {@code _source} value to a float, + * keeping float precision. Used by queries which do not need to validate + * against infinite values like {@link #parse(Object, boolean)} does. 
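The `half_float`/`float` hunks above separate parsing from finiteness validation: `parseToFloat` keeps the raw value, and `validateFiniteValue` is applied only on paths that must reject non-finite input. A minimal standalone sketch of that split (not the Elasticsearch classes themselves), reusing the patch's error message:

```java
public class FiniteValueSketch {

    // Parse without validating, mirroring the patch's parseToFloat.
    static float parseToFloat(Object value) {
        if (value instanceof Number n) {
            return n.floatValue();
        }
        return Float.parseFloat(value.toString());
    }

    // Validation applied only where finiteness is required, mirroring validateFiniteValue.
    static void validateFiniteValue(float value) {
        if (Float.isFinite(value) == false) {
            throw new IllegalArgumentException("[float] supports only finite values, but got [" + value + "]");
        }
    }

    public static void main(String[] args) {
        validateFiniteValue(parseToFloat("1.25"));   // indexing/term-query path: accepted
        float unchecked = parseToFloat("Infinity");  // parsing alone no longer throws
        System.out.println("parsed without validation: " + unchecked);
        try {
            validateFiniteValue(unchecked);          // rejected where finiteness is enforced
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```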
+ */ + private static float parseToFloat(Object value) { final float result; if (value instanceof Number) { @@ -465,7 +473,6 @@ public Float parse(Object value, boolean coerce) { } result = Float.parseFloat(value.toString()); } - validateParsed(result); return result; } @@ -482,13 +489,13 @@ public Number parsePoint(byte[] value) { @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); - validateParsed(parsed); + validateFiniteValue(parsed); return parsed; } @Override public Query termQuery(String field, Object value, boolean isIndexed) { - float v = parse(value, false); + float v = parseToFloat(value); if (isIndexed) { return FloatPoint.newExactQuery(field, v); } else { @@ -520,16 +527,18 @@ public Query rangeQuery( float l = Float.NEGATIVE_INFINITY; float u = Float.POSITIVE_INFINITY; if (lowerTerm != null) { - l = parse(lowerTerm, false); - if (includeLower == false) { - l = FloatPoint.nextUp(l); + l = parseToFloat(lowerTerm); + if (includeLower) { + l = FloatPoint.nextDown(l); } + l = FloatPoint.nextUp(l); } if (upperTerm != null) { - u = parse(upperTerm, false); - if (includeUpper == false) { - u = FloatPoint.nextDown(u); + u = parseToFloat(upperTerm); + if (includeUpper) { + u = FloatPoint.nextUp(u); } + u = FloatPoint.nextDown(u); } Query query; if (isIndexed) { @@ -588,7 +597,7 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( ); } - private static void validateParsed(float value) { + private static void validateFiniteValue(float value) { if (Float.isFinite(value) == false) { throw new IllegalArgumentException("[float] supports only finite values, but got [" + value + "]"); } @@ -1745,13 +1754,16 @@ public CollapseType collapseType() { return CollapseType.NUMERIC; } - /** - * @return true if field has been marked as a dimension field - */ + @Override public boolean isDimension() { return isDimension; } + @Override + public boolean hasScriptValues() { + return scriptValues != null; + } + /** * If field is a time series metric field, returns its metric type * @return the metric type or null diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 05ae7e59f69c3..16b4d0b49917f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -41,8 +41,8 @@ public Builder(String name) { @Override public PassThroughObjectMapper.Builder add(Mapper.Builder builder) { - if (timeSeriesDimensionSubFields.value() && builder instanceof KeywordFieldMapper.Builder keywordBuilder) { - keywordBuilder.dimension(true); + if (timeSeriesDimensionSubFields.value() && builder instanceof FieldMapper.DimensionBuilder dimensionBuilder) { + dimensionBuilder.setInheritDimensionParameterFromParentObject(); } super.add(builder); return this; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 82ff9ef818579..90d9c879c57e1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -128,19 +128,22 @@ public RootObjectMapper build(MapperBuilderContext context) { } Map getAliasMappers(Map mappers, MapperBuilderContext context) { - Map aliasMappers = new 
HashMap<>(); + Map newMappers = new HashMap<>(); Map objectIntermediates = new HashMap<>(1); - getAliasMappers(mappers, aliasMappers, objectIntermediates, context, 0); + Map objectIntermediatesFullName = new HashMap<>(1); + getAliasMappers(mappers, mappers, newMappers, objectIntermediates, objectIntermediatesFullName, context, 0); for (var entry : objectIntermediates.entrySet()) { - aliasMappers.put(entry.getKey(), entry.getValue().build(context)); + newMappers.put(entry.getKey(), entry.getValue().build(context)); } - return aliasMappers; + return newMappers; } void getAliasMappers( Map mappers, + Map topLevelMappers, Map aliasMappers, Map objectIntermediates, + Map objectIntermediatesFullName, MapperBuilderContext context, int level ) { @@ -179,32 +182,76 @@ void getAliasMappers( ).build(context); aliasMappers.put(aliasMapper.simpleName(), aliasMapper); } else { + conflict = topLevelMappers.get(fieldNameParts[0]); + if (conflict != null) { + if (isConflictingObject(conflict, fieldNameParts)) { + throw new IllegalArgumentException( + "Conflicting objects created during alias generation for pass-through field: [" + + conflict.name() + + "]" + ); + } + } + // Nest the alias within object(s). String realFieldName = fieldNameParts[fieldNameParts.length - 1]; Mapper.Builder fieldBuilder = new FieldAliasMapper.Builder(realFieldName).path( fieldMapper.mappedFieldType.name() ); + ObjectMapper.Builder intermediate = null; for (int i = fieldNameParts.length - 2; i >= 0; --i) { String intermediateObjectName = fieldNameParts[i]; - ObjectMapper.Builder intermediate = objectIntermediates.computeIfAbsent( - intermediateObjectName, + intermediate = objectIntermediatesFullName.computeIfAbsent( + concatStrings(fieldNameParts, i), s -> new ObjectMapper.Builder(intermediateObjectName, ObjectMapper.Defaults.SUBOBJECTS) ); intermediate.add(fieldBuilder); fieldBuilder = intermediate; } + objectIntermediates.putIfAbsent(fieldNameParts[0], intermediate); } } } } } else if (mapper instanceof ObjectMapper objectMapper) { // Call recursively to check child fields. The level guards against long recursive call sequences. 
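In the `getAliasMappers` rework that starts above, intermediate object builders are now cached per full dotted prefix (via the `concatStrings` helper added just below, tracked in `objectIntermediatesFullName`), so aliases nested under different parents no longer share an intermediate builder, and `isConflictingObject` rejects collisions with pre-existing mappers. A small illustration of the prefix keys this produces; the field names are invented for the example:

```java
public class AliasPrefixKeySketch {

    // Same shape as the helper added in the patch: builds ".part0.part1..." up to index `last`.
    static String concatStrings(String[] parts, int last) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i <= last; i++) {
            builder.append('.');
            builder.append(parts[i]);
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        String[] first = "resource.attributes.host.name".split("\\.");
        String[] second = "scope.attributes.host.name".split("\\.");
        // Keying by the full prefix keeps these two "attributes" objects distinct,
        // which keying by the simple name "attributes" alone would not.
        System.out.println(concatStrings(first, 1));   // .resource.attributes
        System.out.println(concatStrings(second, 1));  // .scope.attributes
    }
}
```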
- getAliasMappers(objectMapper.mappers, aliasMappers, objectIntermediates, context, level + 1); + getAliasMappers( + objectMapper.mappers, + topLevelMappers, + aliasMappers, + objectIntermediates, + objectIntermediatesFullName, + context, + level + 1 + ); } } } } + private static String concatStrings(String[] parts, int last) { + StringBuilder builder = new StringBuilder(); + for (int i = 0; i <= last; i++) { + builder.append('.'); + builder.append(parts[i]); + } + return builder.toString(); + } + + private static boolean isConflictingObject(Mapper mapper, String[] parts) { + for (int i = 0; i < parts.length - 1; i++) { + if (mapper == null) { + return true; + } + if (mapper instanceof ObjectMapper objectMapper) { + mapper = objectMapper.getMapper(parts[i + 1]); + } else { + return true; + } + } + return mapper == null; + } + private final Explicit dynamicDateTimeFormatters; private final Explicit dateDetection; private final Explicit numericDetection; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 4f3c4814517e5..15770785e11f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -144,7 +144,8 @@ protected Parameter[] getParameters() { } private boolean isDefault() { - if (mode.get() != null) { + Mode m = mode.get(); + if (m != null && (indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) { return false; } if (enabled.get().value() == false) { @@ -169,8 +170,8 @@ public SourceFieldMapper build() { SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( mode.get(), enabled.get(), - includes.getValue().toArray(String[]::new), - excludes.getValue().toArray(String[]::new), + includes.getValue().toArray(Strings.EMPTY_ARRAY), + excludes.getValue().toArray(Strings.EMPTY_ARRAY), indexMode ); if (indexMode != null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index faa840dacc732..ef512e2bbd46b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -236,9 +236,11 @@ private static FielddataFrequencyFilter parseFrequencyFilter(String name, Mappin public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; + private final Parameter store; + + private final boolean isSyntheticSourceEnabledViaIndexMode; private final Parameter index = Parameter.indexParam(m -> ((TextFieldMapper) m).index, true); - private final Parameter store = Parameter.storeParam(m -> ((TextFieldMapper) m).store, false); final Parameter similarity = TextParams.similarity(m -> ((TextFieldMapper) m).similarity); @@ -283,12 +285,28 @@ public static class Builder extends FieldMapper.Builder { final TextParams.Analyzers analyzers; - public Builder(String name, IndexAnalyzers indexAnalyzers) { - this(name, IndexVersion.current(), indexAnalyzers); + public Builder(String name, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabledViaIndexMode) { + this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabledViaIndexMode); } - public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers) { + public Builder( + String name, + IndexVersion indexCreatedVersion, + 
IndexAnalyzers indexAnalyzers, + boolean isSyntheticSourceEnabledViaIndexMode + ) { super(name); + + // If synthetic source is used we need to either store this field + // to recreate the source or use keyword multi-fields for that. + // So if there are no suitable multi-fields we will default to + // storing the field without requiring users to explicitly set 'store'. + // + // If 'store' parameter was explicitly provided we'll reject the request. + this.store = Parameter.storeParam( + m -> ((TextFieldMapper) m).store, + () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + ); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( indexAnalyzers, @@ -296,6 +314,7 @@ public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers ind m -> (((TextFieldMapper) m).positionIncrementGap), indexCreatedVersion ); + this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; } public Builder index(boolean index) { @@ -387,13 +406,9 @@ private static KeywordFieldMapper.KeywordFieldType syntheticSourceDelegate(Field if (fieldType.stored()) { return null; } - for (Mapper sub : multiFields) { - if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { - KeywordFieldMapper kwd = (KeywordFieldMapper) sub; - if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { - return kwd.fieldType(); - } - } + var kwd = getKeywordFieldMapperForSyntheticSource(multiFields); + if (kwd != null) { + return kwd.fieldType(); } return null; } @@ -483,7 +498,7 @@ public TextFieldMapper build(MapperBuilderContext context) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers()), + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()), MINIMUM_COMPATIBILITY_VERSION ); @@ -1203,6 +1218,8 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; + private final boolean isSyntheticSourceEnabledViaIndexMode; + private TextFieldMapper( String simpleName, FieldType fieldType, @@ -1235,6 +1252,7 @@ private TextFieldMapper( this.indexPrefixes = builder.indexPrefixes.getValue(); this.freqFilter = builder.freqFilter.getValue(); this.fieldData = builder.fieldData.get(); + this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; } @Override @@ -1258,7 +1276,7 @@ public Map indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(simpleName(), indexCreatedVersion, indexAnalyzers).init(this); + return new Builder(simpleName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabledViaIndexMode).init(this); } @Override @@ -1454,15 +1472,12 @@ protected void write(XContentBuilder b, Object value) throws IOException { } }; } - for (Mapper sub : this) { - if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { - KeywordFieldMapper kwd = (KeywordFieldMapper) sub; - if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { - return kwd.syntheticFieldLoader(simpleName()); - } - } + var kwd = getKeywordFieldMapperForSyntheticSource(this); + if (kwd != null) { + return 
kwd.syntheticFieldLoader(simpleName()); } + throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -1473,4 +1488,17 @@ protected void write(XContentBuilder b, Object value) throws IOException { ) ); } + + private static KeywordFieldMapper getKeywordFieldMapperForSyntheticSource(Iterable multiFields) { + for (Mapper sub : multiFields) { + if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { + KeywordFieldMapper kwd = (KeywordFieldMapper) sub; + if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { + return kwd; + } + } + } + + return null; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 2d330e433d444..112b3ec96b39e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -413,6 +413,8 @@ public static Map decodeTsidAsMap(StreamInput in) { Object ul = DocValueFormat.UNSIGNED_LONG_SHIFTED.format(in.readLong()); result.put(name, ul); } + case (byte) 'd' -> // parse a double + result.put(name, in.readDouble()); default -> throw new IllegalArgumentException("Cannot parse [" + name + "]: Unknown type [" + type + "]"); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java index 090fe7839b3e9..b9629d7561982 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java @@ -10,6 +10,8 @@ import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersions; @@ -20,8 +22,11 @@ import org.elasticsearch.index.fielddata.plain.SortedOrdinalsIndexFieldData; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.script.field.DelegateDocValuesField; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import java.time.ZoneId; +import java.util.Arrays; import java.util.Base64; import java.util.Collections; @@ -43,6 +48,22 @@ public class TimeSeriesRoutingHashFieldMapper extends MetadataFieldMapper { static final class TimeSeriesRoutingHashFieldType extends MappedFieldType { private static final TimeSeriesRoutingHashFieldType INSTANCE = new TimeSeriesRoutingHashFieldType(); + private static final DocValueFormat DOC_VALUE_FORMAT = new DocValueFormat() { + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) {} + + @Override + public Object format(BytesRef value) { + return Uid.decodeId(value.bytes, value.offset, value.length); + } + + }; private TimeSeriesRoutingHashFieldType() { super(NAME, false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); @@ -75,6 +96,11 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext public Query termQuery(Object value, SearchExecutionContext context) { throw new IllegalArgumentException("[" + NAME + "] is not 
searchable"); } + + @Override + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + return DOC_VALUE_FORMAT; + } } private TimeSeriesRoutingHashFieldMapper() { @@ -86,7 +112,13 @@ public void postParse(DocumentParserContext context) { if (context.indexSettings().getMode() == IndexMode.TIME_SERIES && context.indexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.TIME_SERIES_ROUTING_HASH_IN_ID)) { String routingHash = context.sourceToParse().routing(); - var field = new SortedDocValuesField(NAME, Uid.encodeId(routingHash != null ? routingHash : encode(0))); + if (routingHash == null) { + assert context.sourceToParse().id() != null; + routingHash = Base64.getUrlEncoder() + .withoutPadding() + .encodeToString(Arrays.copyOf(Base64.getUrlDecoder().decode(context.sourceToParse().id()), 4)); + } + var field = new SortedDocValuesField(NAME, Uid.encodeId(routingHash)); context.rootDoc().add(field); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 5a8efb6c8ed59..8feaba73b1dd4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -231,6 +231,11 @@ public static final class KeyedFlattenedFieldType extends StringFieldType { private final String rootName; private final boolean isDimension; + @Override + public boolean isDimension() { + return isDimension; + } + KeyedFlattenedFieldType( String rootName, boolean indexed, @@ -280,24 +285,6 @@ public Query existsQuery(SearchExecutionContext context) { return new PrefixQuery(term); } - @Override - public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + this.rootName - + "." - + this.key - + "] was [" - + typeName() - + "]." - ); - } - } - @Override public Query rangeQuery( Object lowerTerm, @@ -737,17 +724,8 @@ public List dimensions() { @Override public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension && this.dimensions.contains(routingPath) == false) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was [" - + typeName() - + "]." 
- ); + if (this.dimensions.contains(routingPath) == false) { + super.validateMatchedRoutingPath(routingPath); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index f4a9e1727abd6..365880dbcc837 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -735,6 +735,10 @@ static Function errorByteElementsAppender(byte[] v } public abstract double computeDotProduct(VectorData vectorData); + + public static ElementType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } } static final Map namesToElementType = Map.of( diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index fbfce6aab403f..6ab5d6d77d86d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -196,7 +196,11 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap if (fieldMapping != null || allowUnmappedFields) { return fieldMapping; } else if (mapUnmappedFieldAsString) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + getIndexAnalyzers(), + getIndexSettings() != null && getIndexSettings().getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } else { throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 150948b4e5822..a483dd5cd48e9 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -20,14 +20,9 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; @@ -42,9 +37,7 @@ import static java.lang.Math.min; import static java.util.Collections.emptyList; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.core.TimeValue.timeValueNanos; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * Task storing information about a currently running BulkByScroll request. 
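Several hunks above converge on one behavior: the per-type `validateMatchedRoutingPath` overrides are deleted and the check now lives once in `MappedFieldType`, driven by the new `isDimension()` and `hasScriptValues()` accessors, with flattened fields only keeping their `time_series_dimensions` shortcut before delegating to `super`. A condensed sketch of the consolidated rule, with the error text abbreviated for readability:

```java
public class RoutingPathValidationSketch {

    // Consolidated check: a field matching routing_path must be a dimension and must not use a script.
    static void validateMatchedRoutingPath(String fieldName, boolean isDimension, boolean hasScriptValues) {
        if (hasScriptValues) {
            throw new IllegalArgumentException(
                "All fields that match routing_path must be configured with [time_series_dimension: true] ... ["
                    + fieldName + "] has a [script] parameter.");
        }
        if (isDimension == false) {
            throw new IllegalArgumentException(
                "All fields that match routing_path must be configured with [time_series_dimension: true] ... ["
                    + fieldName + "] was not a dimension.");
        }
    }

    public static void main(String[] args) {
        validateMatchedRoutingPath("host.name", true, false);    // passes
        try {
            validateMatchedRoutingPath("host.ip", false, false); // rejected: not a dimension
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```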
@@ -380,37 +373,6 @@ public static class Status implements Task.Status, SuccessfullyProcessed { FIELDS_SET.add(SLICES_FIELD); } - static final ConstructingObjectParser, Void> RETRIES_PARSER = new ConstructingObjectParser<>( - "bulk_by_scroll_task_status_retries", - true, - a -> new Tuple<>(((Long) a[0]), (Long) a[1]) - ); - static { - RETRIES_PARSER.declareLong(constructorArg(), new ParseField(RETRIES_BULK_FIELD)); - RETRIES_PARSER.declareLong(constructorArg(), new ParseField(RETRIES_SEARCH_FIELD)); - } - - public static void declareFields(ObjectParser parser) { - parser.declareInt(StatusBuilder::setSliceId, new ParseField(SLICE_ID_FIELD)); - parser.declareLong(StatusBuilder::setTotal, new ParseField(TOTAL_FIELD)); - parser.declareLong(StatusBuilder::setUpdated, new ParseField(UPDATED_FIELD)); - parser.declareLong(StatusBuilder::setCreated, new ParseField(CREATED_FIELD)); - parser.declareLong(StatusBuilder::setDeleted, new ParseField(DELETED_FIELD)); - parser.declareInt(StatusBuilder::setBatches, new ParseField(BATCHES_FIELD)); - parser.declareLong(StatusBuilder::setVersionConflicts, new ParseField(VERSION_CONFLICTS_FIELD)); - parser.declareLong(StatusBuilder::setNoops, new ParseField(NOOPS_FIELD)); - parser.declareObject(StatusBuilder::setRetries, RETRIES_PARSER, new ParseField(RETRIES_FIELD)); - parser.declareLong(StatusBuilder::setThrottled, new ParseField(THROTTLED_RAW_FIELD)); - parser.declareFloat(StatusBuilder::setRequestsPerSecond, new ParseField(REQUESTS_PER_SEC_FIELD)); - parser.declareString(StatusBuilder::setReasonCancelled, new ParseField(CANCELED_FIELD)); - parser.declareLong(StatusBuilder::setThrottledUntil, new ParseField(THROTTLED_UNTIL_RAW_FIELD)); - parser.declareObjectArray( - StatusBuilder::setSliceStatuses, - (p, c) -> StatusOrException.fromXContent(p), - new ParseField(SLICES_FIELD) - ); - } - private final Integer sliceId; private final long total; private final long updated; @@ -571,11 +533,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.endObject(); } - /** - * We need to write a manual parser for this because of {@link StatusOrException}. Since - * {@link StatusOrException#fromXContent(XContentParser)} tries to peek at a field first before deciding - * what needs to be it cannot use an {@link ObjectParser}. 
- */ public XContentBuilder innerXContent(XContentBuilder builder, Params params) throws IOException { if (sliceId != null) { builder.field(SLICE_ID_FIELD, sliceId); @@ -617,61 +574,6 @@ public XContentBuilder innerXContent(XContentBuilder builder, Params params) thr return builder; } - public static Status fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - if (parser.currentToken() == Token.START_OBJECT) { - token = parser.nextToken(); - } else { - token = parser.nextToken(); - } - ensureExpectedToken(Token.START_OBJECT, token, parser); - token = parser.nextToken(); - ensureExpectedToken(Token.FIELD_NAME, token, parser); - return innerFromXContent(parser); - } - - public static Status innerFromXContent(XContentParser parser) throws IOException { - Token token = parser.currentToken(); - String fieldName = parser.currentName(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - StatusBuilder builder = new StatusBuilder(); - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token == Token.START_OBJECT) { - if (fieldName.equals(Status.RETRIES_FIELD)) { - builder.setRetries(Status.RETRIES_PARSER.parse(parser, null)); - } else { - parser.skipChildren(); - } - } else if (token == Token.START_ARRAY) { - if (fieldName.equals(Status.SLICES_FIELD)) { - while ((token = parser.nextToken()) != Token.END_ARRAY) { - builder.addToSliceStatuses(StatusOrException.fromXContent(parser)); - } - } else { - parser.skipChildren(); - } - } else { // else if it is a value - switch (fieldName) { - case Status.SLICE_ID_FIELD -> builder.setSliceId(parser.intValue()); - case Status.TOTAL_FIELD -> builder.setTotal(parser.longValue()); - case Status.UPDATED_FIELD -> builder.setUpdated(parser.longValue()); - case Status.CREATED_FIELD -> builder.setCreated(parser.longValue()); - case Status.DELETED_FIELD -> builder.setDeleted(parser.longValue()); - case Status.BATCHES_FIELD -> builder.setBatches(parser.intValue()); - case Status.VERSION_CONFLICTS_FIELD -> builder.setVersionConflicts(parser.longValue()); - case Status.NOOPS_FIELD -> builder.setNoops(parser.longValue()); - case Status.THROTTLED_RAW_FIELD -> builder.setThrottled(parser.longValue()); - case Status.REQUESTS_PER_SEC_FIELD -> builder.setRequestsPerSecond(parser.floatValue()); - case Status.CANCELED_FIELD -> builder.setReasonCancelled(parser.text()); - case Status.THROTTLED_UNTIL_RAW_FIELD -> builder.setThrottledUntil(parser.longValue()); - } - } - } - return builder.buildStatus(); - } - @Override public String toString() { StringBuilder builder = new StringBuilder(); @@ -937,46 +839,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Since {@link StatusOrException} can contain either an {@link Exception} or a {@link Status} we need to peek - * at a field first before deciding what needs to be parsed since the same object could contains either. - * The {@link #EXPECTED_EXCEPTION_FIELDS} contains the fields that are expected when the serialised object - * was an instance of exception and the {@link Status#FIELDS_SET} is the set of fields expected when the - * serialized object was an instance of Status. 
- */ - public static StatusOrException fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - if (token == null) { - token = parser.nextToken(); - } - if (token == Token.VALUE_NULL) { - return null; - } else { - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - token = parser.nextToken(); - // This loop is present only to ignore unknown tokens. It breaks as soon as we find a field - // that is allowed. - while (token != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, token, parser); - String fieldName = parser.currentName(); - // weird way to ignore unknown tokens - if (Status.FIELDS_SET.contains(fieldName)) { - return new StatusOrException(Status.innerFromXContent(parser)); - } else if (EXPECTED_EXCEPTION_FIELDS.contains(fieldName)) { - return new StatusOrException(ElasticsearchException.innerFromXContent(parser, false)); - } else { - // Ignore unknown tokens - token = parser.nextToken(); - if (token == Token.START_OBJECT || token == Token.START_ARRAY) { - parser.skipChildren(); - } - token = parser.nextToken(); - } - } - throw new XContentParseException("Unable to parse StatusFromException. Expected fields not found."); - } - } - @Override public String toString() { if (exception != null) { diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index fc4bec969a9be..c21a2b440ba8c 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -62,6 +62,7 @@ public Translog.Operation next() throws IOException { } skippedOperations++; } + reuse = null; // release buffer, it may be large and is no longer needed return null; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 048d9adb8e7e3..b17777fc5a91e 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.store.IndicesStore; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +84,6 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; /** * Configures classes and services that are shared by indices on each node. 
@@ -307,18 +307,15 @@ public static Set getBuiltInMetadataFields() { return builtInMetadataFields; } - private static Function> getFieldFilter(List mapperPlugins) { - Function> fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; + private static Function getFieldFilter(List mapperPlugins) { + Function fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; for (MapperPlugin mapperPlugin : mapperPlugins) { fieldFilter = and(fieldFilter, mapperPlugin.getFieldFilter()); } return fieldFilter; } - private static Function> and( - Function> first, - Function> second - ) { + private static Function and(Function first, Function second) { // the purpose of this method is to not chain no-op field predicates, so that we can easily find out when no plugins plug in // a field filter, hence skip the mappings filtering part as a whole, as it requires parsing mappings into a map. if (first == MapperPlugin.NOOP_FIELD_FILTER) { @@ -328,15 +325,15 @@ private static Function> and( return first; } return index -> { - Predicate firstPredicate = first.apply(index); - Predicate secondPredicate = second.apply(index); - if (firstPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + FieldPredicate firstPredicate = first.apply(index); + FieldPredicate secondPredicate = second.apply(index); + if (firstPredicate == FieldPredicate.ACCEPT_ALL) { return secondPredicate; } - if (secondPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + if (secondPredicate == FieldPredicate.ACCEPT_ALL) { return firstPredicate; } - return firstPredicate.and(secondPredicate); + return new FieldPredicate.And(firstPredicate, secondPredicate); }; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 3319b29df6dfa..026a20415aa91 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -128,6 +128,7 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.store.CompositeIndexFoldersDeletionListener; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesService; @@ -168,7 +169,6 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.Collections.emptyList; @@ -1756,7 +1756,7 @@ public void clearIndexShardCache(ShardId shardId, boolean queryCache, boolean fi * {@link org.elasticsearch.plugins.MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be * returned by get mappings, get index, get field mappings and field capabilities API. 
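The `IndicesModule` change above composes the per-plugin filters with the new `FieldPredicate` type and short-circuits whenever one side is `FieldPredicate.ACCEPT_ALL`, so the common no-plugin case never pays for mapping filtering. A hedged sketch of that per-index composition step, operating directly on predicates; the `secret_` filter is invented, and the class compiles against the server module where `FieldPredicate` is added by this patch:

```java
import org.elasticsearch.plugins.FieldPredicate;

public class FieldFilterCompositionSketch {

    // Same shape as the per-index composition IndicesModule now performs:
    // ACCEPT_ALL composes away; otherwise the two predicates are chained with FieldPredicate.And.
    static FieldPredicate and(FieldPredicate first, FieldPredicate second) {
        if (first == FieldPredicate.ACCEPT_ALL) {
            return second;
        }
        if (second == FieldPredicate.ACCEPT_ALL) {
            return first;
        }
        return new FieldPredicate.And(first, second);
    }

    public static void main(String[] args) {
        FieldPredicate hideSecrets = new FieldPredicate() {
            @Override
            public boolean test(String field) {
                return field.startsWith("secret_") == false;  // hypothetical filter
            }

            @Override
            public String modifyHash(String hash) {
                return "hide-secrets:" + hash;                // record the filtering in the mapping hash
            }

            @Override
            public long ramBytesUsed() {
                return 0;                                     // stateless for this sketch
            }
        };

        FieldPredicate combined = and(FieldPredicate.ACCEPT_ALL, hideSecrets);
        System.out.println(combined == hideSecrets);          // true: ACCEPT_ALL composed away
        System.out.println(combined.test("host.name"));       // true: visible
        System.out.println(combined.test("secret_token"));    // false: filtered
        System.out.println(combined.modifyHash("abc123"));    // hide-secrets:abc123
    }
}
```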
*/ - public Function> getFieldFilter() { + public Function getFieldFilter() { return mapperRegistry.getFieldFilter(); } diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 2e745635d0fd9..6c1a01acb0dab 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -9,6 +9,7 @@ package org.elasticsearch.inference; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { @@ -36,4 +37,13 @@ default Integer dimensions() { return null; } + /** + * The data type for the embeddings this service works with. Defaults to null, + * Text Embedding models should return a non-null value + * + * @return the element type + */ + default DenseVectorFieldMapper.ElementType elementType() { + return null; + } } diff --git a/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java b/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java index cd81cc461bd1d..ff9fedee02fac 100644 --- a/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java +++ b/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java @@ -8,11 +8,18 @@ package org.elasticsearch.inference; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; + +import java.util.EnumSet; import java.util.Locale; public enum SimilarityMeasure { COSINE, - DOT_PRODUCT; + DOT_PRODUCT, + L2_NORM; + + private static final EnumSet BEFORE_L2_NORM_ENUMS = EnumSet.range(COSINE, DOT_PRODUCT); @Override public String toString() { @@ -22,4 +29,21 @@ public String toString() { public static SimilarityMeasure fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } + + /** + * Returns a similarity measure that is known based on the transport version provided. If the similarity enum was not yet + * introduced it will be defaulted to null. 
+ * + * @param similarityMeasure the value to translate if necessary + * @param version the version that dictates the translation + * @return the similarity that is known to the version passed in + */ + public static SimilarityMeasure translateSimilarity(SimilarityMeasure similarityMeasure, TransportVersion version) { + if (version.before(TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED) + && BEFORE_L2_NORM_ENUMS.contains(similarityMeasure) == false) { + return null; + } + + return similarityMeasure; + } } diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 5afedee873145..1e301ad796e90 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -21,6 +21,7 @@ public enum TaskType implements Writeable { TEXT_EMBEDDING, SPARSE_EMBEDDING, + COMPLETION, ANY { @Override public boolean isAnyOrSame(TaskType other) { diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java index 6ae2f53a94ad8..41a68494e7cbb 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java @@ -23,11 +23,13 @@ public class CustomPassageFormatter extends PassageFormatter { private final String preTag; private final String postTag; private final Encoder encoder; + private final int numberOfFragments; - public CustomPassageFormatter(String preTag, String postTag, Encoder encoder) { + public CustomPassageFormatter(String preTag, String postTag, Encoder encoder, int numberOfFragments) { this.preTag = preTag; this.postTag = postTag; this.encoder = encoder; + this.numberOfFragments = numberOfFragments; } @Override @@ -66,8 +68,12 @@ public Snippet[] format(Passage[] passages, String content) { } else if (sb.charAt(sb.length() - 1) == HighlightUtils.NULL_SEPARATOR) { sb.deleteCharAt(sb.length() - 1); } - // and we trim the snippets too - snippets[j] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); + // and we trim the snippets too, if the number of fragments > 0 + if (numberOfFragments == 0) { + snippets[j] = new Snippet(sb.toString(), passage.getScore(), passage.getNumMatches() > 0); + } else { + snippets[j] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); + } } return snippets; } diff --git a/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java b/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java new file mode 100644 index 0000000000000..32692b9740f91 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.plugins; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.cluster.metadata.MappingMetadata; + +import java.util.function.Predicate; + +/** + * Filter for visible fields. + */ +public interface FieldPredicate extends Accountable, Predicate { + /** + * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index + * get field mappings and field capabilities API will return every field that's present in the mappings. + */ + FieldPredicate ACCEPT_ALL = new FieldPredicate() { + @Override + public boolean test(String field) { + return true; + } + + @Override + public String modifyHash(String hash) { + return hash; + } + + @Override + public long ramBytesUsed() { + return 0; // Shared + } + + @Override + public String toString() { + return "accept all"; + } + }; + + /** + * Should this field be included? + */ + @Override + boolean test(String field); + + /** + * Modify the {@link MappingMetadata#getSha256} to track any filtering this predicate + * has performed on the list of fields. + */ + String modifyHash(String hash); + + class And implements FieldPredicate { + private static final long SHALLOW_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(And.class); + + private final FieldPredicate first; + private final FieldPredicate second; + + public And(FieldPredicate first, FieldPredicate second) { + this.first = first; + this.second = second; + } + + @Override + public boolean test(String field) { + return first.test(field) && second.test(field); + } + + @Override + public String modifyHash(String hash) { + return second.modifyHash(first.modifyHash(hash)); + } + + @Override + public long ramBytesUsed() { + return SHALLOW_RAM_BYTES_USED + first.ramBytesUsed() + second.ramBytesUsed(); + } + + @Override + public String toString() { + return first + " then " + second; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java index 401c014488f88..45f04487886d3 100644 --- a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java @@ -8,7 +8,6 @@ package org.elasticsearch.plugins; -import org.elasticsearch.core.Predicates; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.RuntimeField; @@ -16,7 +15,6 @@ import java.util.Collections; import java.util.Map; import java.util.function.Function; -import java.util.function.Predicate; /** * An extension point for {@link Plugin} implementations to add custom mappers @@ -62,19 +60,23 @@ default Map getMetadataMappers() { * get index, get field mappings and field capabilities API. Useful to filter the fields that such API return. The predicate receives * the field name as input argument and should return true to show the field and false to hide it. */ - default Function> getFieldFilter() { + default Function getFieldFilter() { return NOOP_FIELD_FILTER; } - /** - * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index - * get field mappings and field capabilities API will return every field that's present in the mappings. - */ - Predicate NOOP_FIELD_PREDICATE = Predicates.always(); - /** * The default field filter applied, which doesn't filter anything. 
That means that by default get mappings, get index * get field mappings and field capabilities API will return every field that's present in the mappings. */ - Function> NOOP_FIELD_FILTER = index -> NOOP_FIELD_PREDICATE; + Function NOOP_FIELD_FILTER = new Function<>() { + @Override + public FieldPredicate apply(String index) { + return FieldPredicate.ACCEPT_ALL; + } + + @Override + public String toString() { + return "accept all"; + } + }; } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java b/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java index 7b3ea4fbe4581..b64383c562c50 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java @@ -11,23 +11,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * The allowable types, languages and their corresponding contexts. When serialized there is a top level types_allowed list, @@ -68,10 +61,10 @@ * */ public class ScriptLanguagesInfo implements ToXContentObject, Writeable { - private static final ParseField TYPES_ALLOWED = new ParseField("types_allowed"); - private static final ParseField LANGUAGE_CONTEXTS = new ParseField("language_contexts"); - private static final ParseField LANGUAGE = new ParseField("language"); - private static final ParseField CONTEXTS = new ParseField("contexts"); + public static final ParseField TYPES_ALLOWED = new ParseField("types_allowed"); + public static final ParseField LANGUAGE_CONTEXTS = new ParseField("language_contexts"); + public static final ParseField LANGUAGE = new ParseField("language"); + public static final ParseField CONTEXTS = new ParseField("contexts"); public final Set typesAllowed; public final Map> languageContexts; @@ -86,31 +79,6 @@ public ScriptLanguagesInfo(StreamInput in) throws IOException { languageContexts = in.readImmutableMap(sin -> sin.readCollectionAsImmutableSet(StreamInput::readString)); } - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "script_languages_info", - true, - (a) -> new ScriptLanguagesInfo( - new HashSet<>((List) a[0]), - ((List>>) a[1]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) - ) - ); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser>, Void> LANGUAGE_CONTEXT_PARSER = - new ConstructingObjectParser<>("language_contexts", true, (m, name) -> new Tuple<>((String) m[0], Set.copyOf((List) m[1]))); - - static { - PARSER.declareStringArray(constructorArg(), TYPES_ALLOWED); - PARSER.declareObjectArray(constructorArg(), LANGUAGE_CONTEXT_PARSER, LANGUAGE_CONTEXTS); - LANGUAGE_CONTEXT_PARSER.declareString(constructorArg(), LANGUAGE); - LANGUAGE_CONTEXT_PARSER.declareStringArray(constructorArg(), 
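The NOOP_FIELD_FILTER constant above is switched from a lambda to an anonymous class, presumably so that its toString() reads "accept all" in logs and debug output rather than a compiler-generated lambda name. A small stand-alone illustration of that difference (class and variable names are arbitrary):

```java
import java.util.function.Function;

class ToStringDemo {
    public static void main(String[] args) {
        Function<String, Boolean> lambda = index -> true;

        Function<String, Boolean> named = new Function<>() {
            @Override
            public Boolean apply(String index) {
                return true;
            }

            @Override
            public String toString() {
                return "accept all";
            }
        };

        // The lambda prints something like ToStringDemo$$Lambda$14/0x...;
        // the anonymous class prints a stable, human-readable label.
        System.out.println(lambda);
        System.out.println(named);
    }
}
```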
CONTEXTS); - } - - public static ScriptLanguagesInfo fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(typesAllowed); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 60ced289929a0..62a62fb93a4a0 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -33,22 +32,15 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.fetch.subphase.LookupField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.transport.RemoteClusterAware; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -56,7 +48,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -66,10 +57,6 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * A single search hit. 
@@ -80,10 +67,10 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted private final transient int docId; - private static final float DEFAULT_SCORE = Float.NaN; + static final float DEFAULT_SCORE = Float.NaN; private float score; - private static final int NO_RANK = -1; + static final int NO_RANK = -1; private int rank; private final Text id; @@ -935,258 +922,6 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t return builder; } - // All fields on the root level of the parsed SearhHit are interpreted as metadata fields - // public because we use it in a completion suggestion option - @SuppressWarnings("unchecked") - public static final ObjectParser.UnknownFieldConsumer> unknownMetaFieldConsumer = (map, fieldName, fieldValue) -> { - Map fieldMap = (Map) map.computeIfAbsent( - METADATA_FIELDS, - v -> new HashMap() - ); - if (fieldName.equals(IgnoredFieldMapper.NAME)) { - fieldMap.put(fieldName, new DocumentField(fieldName, (List) fieldValue)); - } else { - fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue))); - } - }; - - /** - * This parser outputs a temporary map of the objects needed to create the - * SearchHit instead of directly creating the SearchHit. The reason for this - * is that this way we can reuse the parser when parsing xContent from - * {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines - * the output of - * {@link #toInnerXContent(XContentBuilder, org.elasticsearch.xcontent.ToXContent.Params)} - * of the included search hit. The output of the map is used to create the - * actual SearchHit instance via {@link #createFromMap(Map)} - */ - private static final ObjectParser, Void> MAP_PARSER = new ObjectParser<>( - "innerHitParser", - unknownMetaFieldConsumer, - HashMap::new - ); - - static { - declareInnerHitsParseFields(MAP_PARSER); - } - - public static SearchHit fromXContent(XContentParser parser) { - return createFromMap(MAP_PARSER.apply(parser, null)); - } - - public static void declareInnerHitsParseFields(ObjectParser, Void> parser) { - parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX)); - parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID)); - parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE)); - parser.declareField( - (map, value) -> map.put(Fields._SCORE, value), - SearchHit::parseScore, - new ParseField(Fields._SCORE), - ValueType.FLOAT_OR_NULL - ); - parser.declareInt((map, value) -> map.put(Fields._RANK, value), new ParseField(Fields._RANK)); - - parser.declareLong((map, value) -> map.put(Fields._VERSION, value), new ParseField(Fields._VERSION)); - parser.declareLong((map, value) -> map.put(Fields._SEQ_NO, value), new ParseField(Fields._SEQ_NO)); - parser.declareLong((map, value) -> map.put(Fields._PRIMARY_TERM, value), new ParseField(Fields._PRIMARY_TERM)); - parser.declareField( - (map, value) -> map.put(Fields._SHARD, value), - (p, c) -> ShardId.fromString(p.text()), - new ParseField(Fields._SHARD), - ValueType.STRING - ); - parser.declareObject( - (map, value) -> map.put(SourceFieldMapper.NAME, value), - (p, c) -> parseSourceBytes(p), - new ParseField(SourceFieldMapper.NAME) - ); - parser.declareObject( - (map, value) -> map.put(Fields.HIGHLIGHT, value), - (p, c) -> parseHighlightFields(p), - new ParseField(Fields.HIGHLIGHT) - ); - parser.declareObject((map, value) -> { 
- Map fieldMap = get(Fields.FIELDS, map, new HashMap()); - fieldMap.putAll(value); - map.put(DOCUMENT_FIELDS, fieldMap); - }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS)); - parser.declareObject( - (map, value) -> map.put(Fields._EXPLANATION, value), - (p, c) -> parseExplanation(p), - new ParseField(Fields._EXPLANATION) - ); - parser.declareObject( - (map, value) -> map.put(NestedIdentity._NESTED, value), - NestedIdentity::fromXContent, - new ParseField(NestedIdentity._NESTED) - ); - parser.declareObject( - (map, value) -> map.put(Fields.INNER_HITS, value), - (p, c) -> parseInnerHits(p), - new ParseField(Fields.INNER_HITS) - ); - - parser.declareField((p, map, context) -> { - XContentParser.Token token = p.currentToken(); - Map matchedQueries = new LinkedHashMap<>(); - if (token == XContentParser.Token.START_OBJECT) { - String fieldName = null; - while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = p.currentName(); - } else if (token.isValue()) { - matchedQueries.put(fieldName, p.floatValue()); - } - } - } else if (token == XContentParser.Token.START_ARRAY) { - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - matchedQueries.put(p.text(), Float.NaN); - } - } - map.put(Fields.MATCHED_QUERIES, matchedQueries); - }, new ParseField(Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); - - parser.declareField( - (map, list) -> map.put(Fields.SORT, list), - SearchSortValues::fromXContent, - new ParseField(Fields.SORT), - ValueType.OBJECT_ARRAY - ); - } - - public static SearchHit createFromMap(Map values) { - String id = get(Fields._ID, values, null); - String index = get(Fields._INDEX, values, null); - String clusterAlias = null; - if (index != null) { - int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (indexOf > 0) { - clusterAlias = index.substring(0, indexOf); - index = index.substring(indexOf + 1); - } - } - ShardId shardId = get(Fields._SHARD, values, null); - String nodeId = get(Fields._NODE, values, null); - final SearchShardTarget shardTarget; - if (shardId != null && nodeId != null) { - assert shardId.getIndexName().equals(index); - shardTarget = new SearchShardTarget(nodeId, shardId, clusterAlias); - index = shardTarget.getIndex(); - clusterAlias = shardTarget.getClusterAlias(); - } else { - shardTarget = null; - } - return new SearchHit( - -1, - get(Fields._SCORE, values, DEFAULT_SCORE), - get(Fields._RANK, values, NO_RANK), - id == null ? null : new Text(id), - get(NestedIdentity._NESTED, values, null), - get(Fields._VERSION, values, -1L), - get(Fields._SEQ_NO, values, SequenceNumbers.UNASSIGNED_SEQ_NO), - get(Fields._PRIMARY_TERM, values, SequenceNumbers.UNASSIGNED_PRIMARY_TERM), - get(SourceFieldMapper.NAME, values, null), - get(Fields.HIGHLIGHT, values, null), - get(Fields.SORT, values, SearchSortValues.EMPTY), - get(Fields.MATCHED_QUERIES, values, null), - get(Fields._EXPLANATION, values, null), - shardTarget, - index, - clusterAlias, - null, - get(Fields.INNER_HITS, values, null), - get(DOCUMENT_FIELDS, values, Collections.emptyMap()), - get(METADATA_FIELDS, values, Collections.emptyMap()), - ALWAYS_REFERENCED // TODO: do we ever want pooling here? 
- ); - } - - @SuppressWarnings("unchecked") - private static T get(String key, Map map, T defaultValue) { - return (T) map.getOrDefault(key, defaultValue); - } - - private static float parseScore(XContentParser parser) throws IOException { - if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER || parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return parser.floatValue(); - } else { - return Float.NaN; - } - } - - private static BytesReference parseSourceBytes(XContentParser parser) throws IOException { - try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { - // the original document gets slightly modified: whitespaces or - // pretty printing are not preserved, - // it all depends on the current builder settings - builder.copyCurrentStructure(parser); - return BytesReference.bytes(builder); - } - } - - private static Map parseFields(XContentParser parser) throws IOException { - Map fields = new HashMap<>(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - DocumentField field = DocumentField.fromXContent(parser); - fields.put(field.getName(), field); - } - return fields; - } - - private static Map parseInnerHits(XContentParser parser) throws IOException { - Map innerHits = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String name = parser.currentName(); - ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser); - ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); - innerHits.put(name, SearchHits.fromXContent(parser)); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - } - return innerHits; - } - - private static Map parseHighlightFields(XContentParser parser) throws IOException { - Map highlightFields = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - HighlightField highlightField = HighlightField.fromXContent(parser); - highlightFields.put(highlightField.name(), highlightField); - } - return highlightFields; - } - - private static Explanation parseExplanation(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParser.Token token; - Float value = null; - String description = null; - List details = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - if (Fields.VALUE.equals(currentFieldName)) { - value = parser.floatValue(); - } else if (Fields.DESCRIPTION.equals(currentFieldName)) { - description = parser.textOrNull(); - } else if (Fields.DETAILS.equals(currentFieldName)) { - ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - details.add(parseExplanation(parser)); - } - } else { - parser.skipChildren(); - } - } - if (value == null) { - throw new ParsingException(parser.getTokenLocation(), "missing explanation value"); - } - if (description == null) { - throw new ParsingException(parser.getTokenLocation(), "missing explanation description"); - } - return Explanation.match(value, description, details); - } - private static void buildExplanation(XContentBuilder builder, Explanation explanation) throws 
IOException { builder.startObject(); builder.field(Fields.VALUE, explanation.getValue()); @@ -1251,9 +986,9 @@ public int hashCode() { */ public static final class NestedIdentity implements Writeable, ToXContentFragment { - private static final String _NESTED = "_nested"; - private static final String FIELD = "field"; - private static final String OFFSET = "offset"; + static final String _NESTED = "_nested"; + static final String FIELD = "field"; + static final String OFFSET = "offset"; private final Text field; private final int offset; @@ -1379,25 +1114,6 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I return builder; } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "nested_identity", - true, - ctorArgs -> new NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (NestedIdentity) ctorArgs[2]) - ); - static { - PARSER.declareString(constructorArg(), new ParseField(FIELD)); - PARSER.declareInt(constructorArg(), new ParseField(OFFSET)); - PARSER.declareObject(optionalConstructorArg(), PARSER, new ParseField(_NESTED)); - } - - static NestedIdentity fromXContent(XContentParser parser, Void context) { - return fromXContent(parser); - } - - public static NestedIdentity fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object obj) { if (this == obj) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index d559fc60fa72d..15b83b202fd98 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -27,14 +27,10 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; -import java.util.List; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable { public static final SearchHit[] EMPTY = new SearchHit[0]; @@ -310,50 +306,6 @@ public Iterator toXContentChunked(ToXContent.Params params }), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject()); } - public static SearchHits fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - } - XContentParser.Token token = parser.currentToken(); - String currentFieldName = null; - List hits = new ArrayList<>(); - TotalHits totalHits = null; - float maxScore = 0f; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (Fields.TOTAL.equals(currentFieldName)) { - // For BWC with nodes pre 7.0 - long value = parser.longValue(); - totalHits = value == -1 ? 
null : new TotalHits(value, Relation.EQUAL_TO); - } else if (Fields.MAX_SCORE.equals(currentFieldName)) { - maxScore = parser.floatValue(); - } - } else if (token == XContentParser.Token.VALUE_NULL) { - if (Fields.MAX_SCORE.equals(currentFieldName)) { - maxScore = Float.NaN; // NaN gets rendered as null-field - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (Fields.HITS.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - hits.add(SearchHit.fromXContent(parser)); - } - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (Fields.TOTAL.equals(currentFieldName)) { - totalHits = parseTotalHitsFragment(parser); - } else { - parser.skipChildren(); - } - } - } - return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); - } - @Override public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 70a002d676235..d11a5b9908514 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -40,8 +40,10 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -1578,9 +1580,9 @@ void addResultsObject(SearchContext context) { abstract void addResultsObject(SearchContext context); } - class Reaper implements Runnable { + class Reaper extends AbstractRunnable { @Override - public void run() { + protected void doRun() { assert Transports.assertNotTransportThread("closing contexts may do IO, e.g. deleting dangling files") && ThreadPool.assertNotScheduleThread("closing contexts may do IO, e.g. 
deleting dangling files"); for (ReaderContext context : activeReaders.values()) { @@ -1590,6 +1592,27 @@ public void run() { } } } + + @Override + public void onFailure(Exception e) { + logger.error("unexpected error when freeing search contexts", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + if (e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown()) { + logger.debug("rejected execution when freeing search contexts"); + } else { + onFailure(e); + } + } + + @Override + public boolean isForceExecution() { + // mustn't reject this task even if the queue is full + return true; + } } public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 42eb80cfb1316..ed1c442e1dc28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -97,6 +98,11 @@ public Collection getSubAggregations() { return factoriesBuilder.getAggregatorFactories(); } + /** Return the aggregation's query if it's different from the search query, or null otherwise. */ + public QueryBuilder getQuery() { + return null; + } + /** Return the configured set of pipeline aggregations **/ public Collection getPipelineAggregations() { return factoriesBuilder.getPipelineAggregatorFactories(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java similarity index 67% rename from server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java index 176ca2f918fff..a9aa3efd536d4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java @@ -15,32 +15,40 @@ import org.elasticsearch.search.aggregations.InternalAggregations; /** - * Class for reducing a list of {@link MultiBucketsAggregation.Bucket} to a single - * {@link InternalAggregations} and the number of documents. + * Class for reducing a list of {@link B} to a single {@link InternalAggregations} + * and the number of documents. */ -public final class MultiBucketAggregatorsReducer implements Releasable { +public final class BucketReducer implements Releasable { private final AggregatorsReducer aggregatorsReducer; - long count = 0; + private final B proto; + private long count = 0; - public MultiBucketAggregatorsReducer(AggregationReduceContext context, int size) { + public BucketReducer(B proto, AggregationReduceContext context, int size) { this.aggregatorsReducer = new AggregatorsReducer(context, size); + this.proto = proto; } /** - * Adds a {@link MultiBucketsAggregation.Bucket} for reduction. 
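The Reaper change above swaps a bare Runnable for AbstractRunnable so that a failure or a rejected execution no longer escapes into the scheduler. Here is a hedged sketch of the same pattern for a generic periodic cleanup task, reusing the Elasticsearch base class; the CleanupTask name and log messages are invented.

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

// Hypothetical periodic cleanup task following the same pattern as the Reaper change above:
// force execution so a full queue cannot drop it, tolerate executor-shutdown rejections,
// and log (rather than propagate) unexpected failures so the task keeps being rescheduled.
class CleanupTask extends AbstractRunnable {
    private static final Logger logger = LogManager.getLogger(CleanupTask.class);

    @Override
    protected void doRun() {
        // ... close idle resources, delete dangling files, etc. ...
    }

    @Override
    public void onFailure(Exception e) {
        logger.error("unexpected error during cleanup", e);
    }

    @Override
    public void onRejection(Exception e) {
        if (e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown()) {
            logger.debug("cleanup rejected because the executor is shutting down");
        } else {
            onFailure(e);
        }
    }

    @Override
    public boolean isForceExecution() {
        return true; // must not be rejected just because the queue is full
    }
}
```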
+ * Adds a {@link B} for reduction. */ - public void accept(MultiBucketsAggregation.Bucket bucket) { + public void accept(B bucket) { count += bucket.getDocCount(); aggregatorsReducer.accept(bucket.getAggregations()); } + /** + * returns the bucket prototype. + */ + public B getProto() { + return proto; + } + /** * returns the reduced {@link InternalAggregations}. */ - public InternalAggregations get() { + public InternalAggregations getAggregations() { return aggregatorsReducer.get(); - } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java new file mode 100644 index 0000000000000..b29159c66ac40 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorsReducer; +import org.elasticsearch.search.aggregations.InternalAggregations; + +import java.util.ArrayList; +import java.util.List; + +/** + * Class for reducing a list of {@link B} to a single {@link InternalAggregations} + * and the number of documents in a delayable fashion. + * + * This class can be reused by calling {@link #reset(B)}. + * + * @see BucketReducer + */ +public final class DelayedBucketReducer { + + private final AggregationReduceContext context; + // changes at reset time + private B proto; + // the maximum size of this array is the number of shards to be reduced. We currently do it in a batches of 256 + // by default. if we expect bigger batches, we might consider to use ObjectArray. + private final List internalAggregations; + private long count = 0; + + public DelayedBucketReducer(B proto, AggregationReduceContext context) { + this.proto = proto; + this.context = context; + this.internalAggregations = new ArrayList<>(); + } + + /** + * Adds a {@link B} for reduction. + */ + public void accept(B bucket) { + count += bucket.getDocCount(); + internalAggregations.add(bucket.getAggregations()); + } + + /** + * returns the bucket prototype. + */ + public B getProto() { + return proto; + } + + /** + * Reset the content of this reducer. + */ + public void reset(B proto) { + this.proto = proto; + count = 0L; + internalAggregations.clear(); + } + + /** + * returns the reduced {@link InternalAggregations}. 
+ */ + public InternalAggregations getAggregations() { + try (AggregatorsReducer aggregatorsReducer = new AggregatorsReducer(context, internalAggregations.size())) { + for (InternalAggregations agg : internalAggregations) { + aggregatorsReducer.accept(agg); + } + return aggregatorsReducer.get(); + } + } + + /** + * returns the number of docs + */ + public long getDocCount() { + return count; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java index 899d9dad7229c..a7261c9fd73f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java @@ -23,15 +23,13 @@ public abstract class FixedMultiBucketAggregatorsReducer implements Releasable { // we could use an ObjectArray here but these arrays are in normally small, so it is not worthy - private final MultiBucketAggregatorsReducer[] bucketsReducer; - private final List protoList; + private final List> bucketReducer; public FixedMultiBucketAggregatorsReducer(AggregationReduceContext reduceContext, int size, List protoList) { reduceContext.consumeBucketsAndMaybeBreak(protoList.size()); - this.protoList = protoList; - this.bucketsReducer = new MultiBucketAggregatorsReducer[protoList.size()]; + this.bucketReducer = new ArrayList<>(protoList.size()); for (int i = 0; i < protoList.size(); ++i) { - bucketsReducer[i] = new MultiBucketAggregatorsReducer(reduceContext, size); + bucketReducer.add(new BucketReducer<>(protoList.get(i), reduceContext, size)); } } @@ -40,10 +38,9 @@ public FixedMultiBucketAggregatorsReducer(AggregationReduceContext reduceContext * of the list passed on the constructor */ public final void accept(List buckets) { - assert buckets.size() == protoList.size(); - int i = 0; - for (B bucket : buckets) { - bucketsReducer[i++].accept(bucket); + assert buckets.size() == bucketReducer.size(); + for (int i = 0; i < buckets.size(); i++) { + bucketReducer.get(i).accept(buckets.get(i)); } } @@ -51,19 +48,17 @@ public final void accept(List buckets) { * returns the reduced buckets. 
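BucketReducer and the new DelayedBucketReducer above share one idea: remember a prototype bucket for its key and formats, count documents eagerly, and merge sub-aggregations only when the final result is requested (DelayedBucketReducer additionally supports reset() so a slot can be recycled). The following is a self-contained sketch of that idea, with plain Java types standing in for Elasticsearch's aggregation classes.

```java
import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the BucketReducer/DelayedBucketReducer idea: keep the first bucket seen
// as a "prototype", accumulate the doc count eagerly, and defer the expensive merge of
// sub-aggregations until asked for. Bucket and the string "sub-aggs" are stand-ins only.
final class DelayedReducerSketch {
    record Bucket(String key, long docCount, List<String> subAggs) {}

    private Bucket proto;
    private long docCount = 0;
    private final List<List<String>> pendingSubAggs = new ArrayList<>();

    DelayedReducerSketch(Bucket proto) {
        this.proto = proto;
    }

    void accept(Bucket bucket) {
        docCount += bucket.docCount();
        pendingSubAggs.add(bucket.subAggs());   // defer the merge
    }

    void reset(Bucket newProto) {               // lets a slot be reused after an eviction
        proto = newProto;
        docCount = 0;
        pendingSubAggs.clear();
    }

    Bucket reduced() {
        List<String> merged = new ArrayList<>();
        pendingSubAggs.forEach(merged::addAll); // stand-in for AggregatorsReducer
        return new Bucket(proto.key(), docCount, merged);
    }
}
```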
*/ public final List get() { - final List reduceBuckets = new ArrayList<>(protoList.size()); - for (int i = 0; i < protoList.size(); i++) { - final B proto = protoList.get(i); - final MultiBucketAggregatorsReducer reducer = bucketsReducer[i]; - reduceBuckets.add(createBucket(proto, reducer.getDocCount(), reducer.get())); + final List reduceBuckets = new ArrayList<>(bucketReducer.size()); + for (final BucketReducer reducer : bucketReducer) { + reduceBuckets.add(createBucket(reducer.getProto(), reducer.getDocCount(), reducer.getAggregations())); } return reduceBuckets; } - protected abstract B createBucket(B proto, long focCount, InternalAggregations aggregations); + protected abstract B createBucket(B proto, long docCount, InternalAggregations aggregations); @Override public final void close() { - Releasables.close(bucketsReducer); + Releasables.close(bucketReducer); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 31cd5c9426755..22c967bb2ea14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -20,6 +22,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.DelayedBucketReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -201,56 +204,29 @@ int[] getReverseMuls() { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final ObjectArrayPriorityQueue pq = new ObjectArrayPriorityQueue<>(size, reduceContext.bigArrays()) { - @Override - protected boolean lessThan(BucketIterator a, BucketIterator b) { - return a.compareTo(b) < 0; - } - }; + final BucketsQueue queue = new BucketsQueue(reduceContext); boolean earlyTerminated = false; @Override public void accept(InternalAggregation aggregation) { InternalComposite sortedAgg = (InternalComposite) aggregation; earlyTerminated |= sortedAgg.earlyTerminated; - BucketIterator it = new BucketIterator(sortedAgg.buckets); - if (it.next() != null) { - pq.add(it); + for (InternalBucket bucket : sortedAgg.getBuckets()) { + if (queue.add(bucket) == false) { + // if the bucket is not competitive, we can break + // because incoming buckets are sorted + break; + } } } @Override public InternalAggregation get() { - InternalBucket lastBucket = null; - List buckets = new ArrayList<>(); - List result = new ArrayList<>(); - while (pq.size() > 0) { - BucketIterator bucketIt = pq.top(); - if (lastBucket != null && bucketIt.current.compareKey(lastBucket) != 0) { - InternalBucket reduceBucket = reduceBucket(buckets, 
reduceContext); - buckets.clear(); - result.add(reduceBucket); - if (result.size() >= getSize()) { - break; - } - } - lastBucket = bucketIt.current; - buckets.add(bucketIt.current); - if (bucketIt.next() != null) { - pq.updateTop(); - } else { - pq.pop(); - } - } - if (buckets.size() > 0) { - InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); - result.add(reduceBucket); - } - + final List result = queue.get(); List reducedFormats = formats; CompositeKey lastKey = null; - if (result.size() > 0) { - lastBucket = result.get(result.size() - 1); + if (result.isEmpty() == false) { + InternalBucket lastBucket = result.get(result.size() - 1); /* Attach the formats from the last bucket to the reduced composite * so that we can properly format the after key. */ reducedFormats = lastBucket.formats; @@ -275,11 +251,82 @@ public InternalAggregation get() { @Override public void close() { - Releasables.close(pq); + Releasables.close(queue); } }; } + private class BucketsQueue implements Releasable { + private final ObjectObjectPagedHashMap> bucketReducers; + private final ObjectArrayPriorityQueue queue; + private final AggregationReduceContext reduceContext; + + private BucketsQueue(AggregationReduceContext reduceContext) { + this.reduceContext = reduceContext; + bucketReducers = new ObjectObjectPagedHashMap<>(getSize(), reduceContext.bigArrays()); + queue = new ObjectArrayPriorityQueue<>(getSize(), reduceContext.bigArrays()) { + @Override + protected boolean lessThan(InternalBucket a, InternalBucket b) { + return b.compareKey(a) < 0; + } + }; + } + + /** adds a bucket to the queue. Return false if the bucket is not competitive, otherwise true.*/ + boolean add(InternalBucket bucket) { + DelayedBucketReducer delayed = bucketReducers.get(bucket.key); + if (delayed == null) { + final InternalBucket out = queue.insertWithOverflow(bucket); + if (out == null) { + // bucket is added + delayed = new DelayedBucketReducer<>(bucket, reduceContext); + } else if (out == bucket) { + // bucket is not competitive + return false; + } else { + // bucket replaces existing bucket + delayed = bucketReducers.remove(out.key); + assert delayed != null; + delayed.reset(bucket); + } + bucketReducers.put(bucket.key, delayed); + } + delayed.accept(bucket); + return true; + } + + /** Return the list of reduced buckets */ + List get() { + final int bucketsSize = (int) bucketReducers.size(); + final InternalBucket[] result = new InternalBucket[bucketsSize]; + for (int i = bucketsSize - 1; i >= 0; i--) { + final InternalBucket bucket = queue.pop(); + assert bucket != null; + /* Use the formats from the bucket because they'll be right to format + * the key. The formats on the InternalComposite doing the reducing are + * just whatever formats make sense for *its* index. This can be real + * trouble when the index doing the reducing is unmapped. 
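The new BucketsQueue above keeps at most `size` composite keys in a priority queue plus one DelayedBucketReducer per surviving key, recycling a reducer whenever its key is evicted; because incoming buckets are sorted, the caller can stop as soon as add() reports a non-competitive bucket. Below is a simplified, self-contained sketch of that bookkeeping using plain JDK collections; string keys and doc-count accumulators stand in for the real types.

```java
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;

// Keep only the N smallest keys; the queue's head is the "worst" (largest) key still kept.
// One accumulator per live key lives in a side map and is recycled when its key is evicted.
final class TopNReducerQueue {
    private final int maxSize;
    private final PriorityQueue<String> queue = new PriorityQueue<>(Comparator.reverseOrder());
    private final Map<String, long[]> reducers = new HashMap<>();   // key -> mutable doc count

    TopNReducerQueue(int maxSize) {
        this.maxSize = maxSize;                                     // assumed >= 1
    }

    /** Adds one (key, docCount) pair; returns false if the key is not competitive. */
    boolean add(String key, long docCount) {
        long[] reducer = reducers.get(key);
        if (reducer == null) {
            if (queue.size() < maxSize) {
                reducer = new long[] { 0 };                         // brand-new slot
            } else if (key.compareTo(queue.peek()) >= 0) {
                return false;                                       // worse than the worst kept key
            } else {
                reducer = reducers.remove(queue.poll());            // evict and recycle the slot
                reducer[0] = 0;
            }
            queue.offer(key);
            reducers.put(key, reducer);
        }
        reducer[0] += docCount;
        return true;
    }
}
```

In InternalComposite the same early exit is what lets the reducer stop scanning a shard's sorted bucket list as soon as one bucket is rejected.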
*/ + final var reducedFormats = bucket.formats; + final DelayedBucketReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); + result[i] = new InternalBucket( + sourceNames, + reducedFormats, + bucket.key, + reverseMuls, + missingOrders, + reducer.getDocCount(), + reducer.getAggregations() + ); + } + return List.of(result); + } + + @Override + public void close() { + Releasables.close(bucketReducers, queue); + } + } + @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalComposite( @@ -296,22 +343,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } - private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.isEmpty() == false; - long docCount = 0; - for (InternalBucket bucket : buckets) { - docCount += bucket.docCount; - } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - /* Use the formats from the bucket because they'll be right to format - * the key. The formats on the InternalComposite doing the reducing are - * just whatever formats make sense for *its* index. This can be real - * trouble when the index doing the reducing is unmapped. */ - final var reducedFormats = buckets.get(0).formats; - return new InternalBucket(sourceNames, reducedFormats, buckets.get(0).key, reverseMuls, missingOrders, docCount, aggs); - } - @Override public boolean equals(Object obj) { if (this == obj) return true; @@ -331,24 +362,6 @@ public int hashCode() { return Objects.hash(super.hashCode(), size, buckets, afterKey, Arrays.hashCode(reverseMuls), Arrays.hashCode(missingOrders)); } - private static class BucketIterator implements Comparable { - final Iterator it; - InternalBucket current; - - private BucketIterator(List buckets) { - this.it = buckets.iterator(); - } - - @Override - public int compareTo(BucketIterator other) { - return current.compareKey(other.current); - } - - InternalBucket next() { - return current = it.hasNext() ? 
it.next() : null; - } - } - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements CompositeAggregation.Bucket, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index 4918a57b29ed1..027551288be5f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -17,7 +17,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -81,7 +81,7 @@ public List getBuckets() { protected AggregatorReducer getLeaderReducer(AggregationReduceContext context, int size) { return new AggregatorReducer() { - final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( + final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( size, context.bigArrays() ); @@ -91,9 +91,9 @@ public void accept(InternalAggregation aggregation) { @SuppressWarnings("unchecked") final InternalGeoGrid grid = (InternalGeoGrid) aggregation; for (InternalGeoGridBucket bucket : grid.getBuckets()) { - MultiBucketAggregatorsReducer reducer = bucketsReducer.get(bucket.hashAsLong()); + BucketReducer reducer = bucketsReducer.get(bucket.hashAsLong()); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(context, size); + reducer = new BucketReducer<>(bucket, context, size); bucketsReducer.put(bucket.hashAsLong(), reducer); } reducer.accept(bucket); @@ -106,8 +106,8 @@ public InternalAggregation get() { context.isFinalReduce() == false ? 
bucketsReducer.size() : Math.min(requiredSize, bucketsReducer.size()) ); try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, context.bigArrays())) { - bucketsReducer.iterator().forEachRemaining(entry -> { - InternalGeoGridBucket bucket = createBucket(entry.key, entry.value.getDocCount(), entry.value.get()); + bucketsReducer.forEach(entry -> { + InternalGeoGridBucket bucket = createBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations()); ordered.insertWithOverflow(bucket); }); final InternalGeoGridBucket[] list = new InternalGeoGridBucket[(int) ordered.size()]; @@ -121,7 +121,7 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(r -> Releasables.close(r.value)); + bucketsReducer.forEach(r -> Releasables.close(r.value)); Releasables.close(bucketsReducer); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 3478773464feb..27a79095eb49d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -492,7 +492,7 @@ public void accept(InternalAggregation aggregation) { long key = NumericUtils.doubleToSortableLong(bucket.centroid()); ReducerAndExtraInfo reducer = bucketsReducer.get(key); if (reducer == null) { - reducer = new ReducerAndExtraInfo(new MultiBucketAggregatorsReducer(reduceContext, size)); + reducer = new ReducerAndExtraInfo(new BucketReducer<>(bucket, reduceContext, size)); bucketsReducer.put(key, reducer); reduceContext.consumeBucketsAndMaybeBreak(1); } @@ -506,10 +506,12 @@ public void accept(InternalAggregation aggregation) { @Override public InternalAggregation get() { final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.iterator().forEachRemaining(entry -> { + bucketsReducer.forEach(entry -> { final double centroid = entry.value.sum[0] / entry.value.reducer.getDocCount(); final Bucket.BucketBounds bounds = new Bucket.BucketBounds(entry.value.min[0], entry.value.max[0]); - reducedBuckets.add(new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.get())); + reducedBuckets.add( + new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.getAggregations()) + ); }); reducedBuckets.sort(Comparator.comparing(Bucket::centroid)); mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); @@ -523,14 +525,14 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(entry -> Releasables.close(entry.value.reducer)); + bucketsReducer.forEach(entry -> 
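For the variable-width histogram, ReducerAndExtraInfo above carries min, max and sum alongside the per-bucket reducer so the reduced bucket can report correct bounds and a doc-count-weighted centroid. A plain-Java sketch of that accumulation follows; field and method names are illustrative only.

```java
// Merge shard-level histogram buckets that share the same rounded centroid: fold their
// doc counts, bounds and weighted centroids so the reduced bucket stays accurate.
final class CentroidAccumulator {
    private long docCount = 0;
    private double min = Double.POSITIVE_INFINITY;
    private double max = Double.NEGATIVE_INFINITY;
    private double sum = 0;

    void accept(long bucketDocCount, double bucketMin, double bucketMax, double bucketCentroid) {
        docCount += bucketDocCount;
        min = Math.min(min, bucketMin);
        max = Math.max(max, bucketMax);
        sum += bucketCentroid * bucketDocCount;   // weight each shard-level centroid by its doc count
    }

    long docCount() { return docCount; }
    double min()     { return min; }
    double max()     { return max; }
    double centroid() { return sum / docCount; }
}
```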
Releasables.close(entry.value.reducer)); Releasables.close(bucketsReducer); } }; } - private record ReducerAndExtraInfo(MultiBucketAggregatorsReducer reducer, double[] min, double[] max, double[] sum) { - private ReducerAndExtraInfo(MultiBucketAggregatorsReducer reducer) { + private record ReducerAndExtraInfo(BucketReducer reducer, double[] min, double[] max, double[] sum) { + private ReducerAndExtraInfo(BucketReducer reducer) { this(reducer, new double[] { Double.POSITIVE_INFINITY }, new double[] { Double.NEGATIVE_INFINITY }, new double[] { 0 }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java index 71374421481eb..07208ab2096a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java @@ -13,7 +13,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import java.util.ArrayList; @@ -28,7 +28,7 @@ abstract class LongKeyedMultiBucketsAggregatorReducer bucketsReducer; + private final LongObjectPagedHashMap> bucketsReducer; int consumeBucketCount = 0; LongKeyedMultiBucketsAggregatorReducer(AggregationReduceContext reduceContext, int size, long minDocCount) { @@ -42,16 +42,16 @@ abstract class LongKeyedMultiBucketsAggregatorReducer reducer = bucketsReducer.get(key); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(reduceContext, size); + reducer = new BucketReducer<>(bucket, reduceContext, size); bucketsReducer.put(key, reducer); } consumeBucketsAndMaybeBreak(reducer, bucket); reducer.accept(bucket); } - private void consumeBucketsAndMaybeBreak(MultiBucketAggregatorsReducer reducer, B bucket) { + private void consumeBucketsAndMaybeBreak(BucketReducer reducer, B bucket) { if (reduceContext.isFinalReduce() == false || minDocCount == 0) { if (reducer.getDocCount() == 0 && bucket.getDocCount() > 0) { consumeBucketsAndMaybeBreak(); @@ -76,9 +76,9 @@ private void consumeBucketsAndMaybeBreak() { public final List get() { reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.iterator().forEachRemaining(entry -> { + bucketsReducer.forEach(entry -> { if (reduceContext.isFinalReduce() == false || entry.value.getDocCount() >= minDocCount) { - reducedBuckets.add(createBucket(entry.key, entry.value.getDocCount(), entry.value.get())); + reducedBuckets.add(createBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations())); } }); return reducedBuckets; @@ -91,7 +91,7 @@ public final List get() { @Override public final void close() { - bucketsReducer.iterator().forEachRemaining(r -> Releasables.close(r.value)); + bucketsReducer.forEach(r -> Releasables.close(r.value)); Releasables.close(bucketsReducer); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index a3f53b494acfa..3557947bb9ea7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -19,14 +20,13 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -225,30 +225,40 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final Map buckets = new HashMap<>(); + final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation; for (Bucket bucket : ipPrefix.getBuckets()) { - ReducerAndProto reducerAndProto = buckets.computeIfAbsent( - bucket.key, - k -> new ReducerAndProto(new MultiBucketAggregatorsReducer(reduceContext, size), bucket) - ); - reducerAndProto.reducer.accept(bucket); + BucketReducer bucketReducer = buckets.get(bucket.key); + if (bucketReducer == null) { + bucketReducer = new BucketReducer<>(bucket, reduceContext, size); + boolean success = false; + try { + buckets.put(bucket.key, bucketReducer); + success = true; + } finally { + if (success == false) { + Releasables.close(bucketReducer); + } + } + } + bucketReducer.accept(bucket); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>(buckets.size()); - for (ReducerAndProto reducerAndProto : buckets.values()) { - if (false == reduceContext.isFinalReduce() || reducerAndProto.reducer.getDocCount() >= minDocCount) { - reducedBuckets.add( - createBucket(reducerAndProto.proto, reducerAndProto.reducer.get(), reducerAndProto.reducer.getDocCount()) - ); + final List reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size())); + buckets.forEach(entry -> { + if (false == reduceContext.isFinalReduce() || entry.value.getDocCount() >= minDocCount) { + reducedBuckets.add(createBucket(entry.value.getProto(), entry.value.getAggregations(), entry.value.getDocCount())); } - } + }); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); reducedBuckets.sort(Comparator.comparing(a -> a.key)); return new InternalIpPrefix(getName(), format, keyed, 
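Both the ip_prefix and significant_terms reducers in this change store their per-key reducers in a circuit-breaker-accounted ObjectObjectPagedHashMap, which is why each insertion is wrapped in a success flag: if the put itself fails, the freshly created reducer must be closed or it would leak. A hedged sketch of that idiom, with a plain Map and Supplier standing in for the paged hash map and the reducer factory:

```java
import java.util.Map;
import java.util.function.Supplier;

import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;

final class GuardedPut {
    // When a freshly created releasable value is handed to a container whose put() may throw
    // (for example by tripping a circuit breaker), close the value unless the put succeeded.
    static <K> Releasable getOrCreate(Map<K, Releasable> map, K key, Supplier<Releasable> factory) {
        Releasable reducer = map.get(key);
        if (reducer == null) {
            reducer = factory.get();
            boolean success = false;
            try {
                map.put(key, reducer);
                success = true;
            } finally {
                if (success == false) {
                    Releasables.close(reducer); // the map never took ownership, so release it here
                }
            }
        }
        return reducer;
    }
}
```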
minDocCount, reducedBuckets, metadata); @@ -256,15 +266,12 @@ public InternalAggregation get() { @Override public void close() { - for (ReducerAndProto reducerAndProto : buckets.values()) { - Releasables.close(reducerAndProto.reducer); - } + buckets.forEach(entry -> Releasables.close(entry.value)); + Releasables.close(buckets); } }; } - private record ReducerAndProto(MultiBucketAggregatorsReducer reducer, Bucket proto) {} - @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { if (keyed) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index af966963e43fc..c423b2ca8cb51 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; +import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.TopBucketBuilder; import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; @@ -30,7 +31,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.function.Function; +import java.util.function.Consumer; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyAsc; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; @@ -40,7 +41,7 @@ /** * Base class for terms and multi_terms aggregation that handles common reduce logic */ -public abstract class AbstractInternalTerms, B extends AbstractInternalTerms.AbstractTermsBucket> +public abstract class AbstractInternalTerms, B extends AbstractInternalTerms.AbstractTermsBucket> extends InternalMultiBucketAggregation { public AbstractInternalTerms(String name, Map metadata) { @@ -52,7 +53,9 @@ protected AbstractInternalTerms(StreamInput in) throws IOException { super(in); } - public abstract static class AbstractTermsBucket extends InternalMultiBucketAggregation.InternalBucket { + public abstract static class AbstractTermsBucket> extends InternalMultiBucketAggregation.InternalBucket + implements + KeyComparable { protected abstract void updateDocCountError(long docCountErrorDiff); @@ -153,7 +156,7 @@ private long getDocCountError(A terms) { private BucketOrder reduceBuckets( List aggregations, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { /* * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. 
@@ -176,7 +179,7 @@ private void reduceMergeSort( List aggregations, BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { assert isKeyOrder(thisReduceOrder); final Comparator cmp = thisReduceOrder.comparator(); @@ -201,12 +204,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { assert lastBucket == null || cmp.compare(top.current(), lastBucket) >= 0; if (lastBucket != null && cmp.compare(top.current(), lastBucket) != 0) { // the key changed so bundle up the last key's worth of buckets - boolean shouldContinue = sink.apply( - new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets) - ); - if (false == shouldContinue) { - return; - } + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); sameTermBuckets = new ArrayList<>(); } lastBucket = top.current(); @@ -226,14 +224,14 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } if (sameTermBuckets.isEmpty() == false) { - sink.apply(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); } } private void reduceLegacy( List aggregations, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { Map> bucketMap = new HashMap<>(); for (InternalAggregation aggregation : aggregations) { @@ -246,12 +244,7 @@ private void reduceLegacy( } } for (List sameTermBuckets : bucketMap.values()) { - boolean shouldContinue = sink.apply( - new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets) - ); - if (false == shouldContinue) { - return; - } + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); } } @@ -304,7 +297,6 @@ public InternalAggregation doReduce(List aggregations, Aggr if (bucket.getDocCount() >= getMinDocCount()) { top.add(bucket); } - return true; }); result = top.build(); } else { @@ -316,8 +308,11 @@ public InternalAggregation doReduce(List aggregations, Aggr boolean canPrune = isKeyOrder(getOrder()) && getMinDocCount() == 0; result = new ArrayList<>(); thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - result.add(bucket.reduced()); - return false == canPrune || result.size() < getRequiredSize(); + if (canPrune == false || result.size() < getRequiredSize()) { + result.add(bucket.reduced()); + } else { + otherDocCount[0] += bucket.getDocCount(); + } }); } for (B r : result) { @@ -361,12 +356,12 @@ protected static XContentBuilder doXContentCommon( Params params, Long docCountError, long otherDocCount, - List buckets + List> buckets ) throws IOException { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError); builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); - for (AbstractTermsBucket bucket : buckets) { + for (AbstractTermsBucket bucket : buckets) { bucket.toXContent(builder, params); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 0d06e455c57fa..f8e7f3cf3a69c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -17,14 +18,14 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -202,7 +203,10 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont return new AggregatorReducer() { long globalSubsetSize = 0; long globalSupersetSize = 0; - final Map> buckets = new HashMap<>(); + final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { @@ -213,29 +217,38 @@ public void accept(InternalAggregation aggregation) { globalSubsetSize += terms.getSubsetSize(); globalSupersetSize += terms.getSupersetSize(); for (B bucket : terms.getBuckets()) { - final ReducerAndProto reducerAndProto = buckets.computeIfAbsent( - bucket.getKeyAsString(), - k -> new ReducerAndProto<>(new MultiBucketAggregatorsReducer(reduceContext, size), bucket) - ); - reducerAndProto.reducer.accept(bucket); - reducerAndProto.subsetDf[0] += bucket.subsetDf; - reducerAndProto.supersetDf[0] += bucket.supersetDf; + ReducerAndExtraInfo reducerAndExtraInfo = buckets.get(bucket.getKeyAsString()); + if (reducerAndExtraInfo == null) { + reducerAndExtraInfo = new ReducerAndExtraInfo<>(new BucketReducer<>(bucket, reduceContext, size)); + boolean success = false; + try { + buckets.put(bucket.getKeyAsString(), reducerAndExtraInfo); + success = true; + } finally { + if (success == false) { + Releasables.close(reducerAndExtraInfo.reducer); + } + } + } + reducerAndExtraInfo.reducer.accept(bucket); + reducerAndExtraInfo.subsetDf[0] += bucket.subsetDf; + reducerAndExtraInfo.supersetDf[0] += bucket.supersetDf; } } @Override public InternalAggregation get() { final SignificanceHeuristic heuristic = getSignificanceHeuristic().rewrite(reduceContext); - final int size = reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size()); + final int size = (int) (reduceContext.isFinalReduce() == false ? 
buckets.size() : Math.min(requiredSize, buckets.size())); try (BucketSignificancePriorityQueue ordered = new BucketSignificancePriorityQueue<>(size, reduceContext.bigArrays())) { - for (ReducerAndProto reducerAndProto : buckets.values()) { + buckets.forEach(entry -> { final B b = createBucket( - reducerAndProto.subsetDf[0], + entry.value.subsetDf[0], globalSubsetSize, - reducerAndProto.supersetDf[0], + entry.value.supersetDf[0], globalSupersetSize, - reducerAndProto.reducer.get(), - reducerAndProto.proto + entry.value.reducer.getAggregations(), + entry.value.reducer.getProto() ); b.updateScore(heuristic); if (((b.score > 0) && (b.subsetDf >= minDocCount)) || reduceContext.isFinalReduce() == false) { @@ -248,7 +261,7 @@ public InternalAggregation get() { } else { reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); } - } + }); final B[] list = createBucketsArray((int) ordered.size()); for (int i = (int) ordered.size() - 1; i >= 0; i--) { list[i] = ordered.pop(); @@ -259,16 +272,19 @@ public InternalAggregation get() { @Override public void close() { - for (ReducerAndProto reducerAndProto : buckets.values()) { - Releasables.close(reducerAndProto.reducer); - } + buckets.forEach(entry -> Releasables.close(entry.value.reducer)); + Releasables.close(buckets); } }; } - private record ReducerAndProto(MultiBucketAggregatorsReducer reducer, B proto, long[] subsetDf, long[] supersetDf) { - private ReducerAndProto(MultiBucketAggregatorsReducer reducer, B proto) { - this(reducer, proto, new long[] { 0 }, new long[] { 0 }); + private record ReducerAndExtraInfo( + BucketReducer reducer, + long[] subsetDf, + long[] supersetDf + ) { + private ReducerAndExtraInfo(BucketReducer reducer) { + this(reducer, new long[] { 0 }, new long[] { 0 }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 66a3ddb2c94c4..e6e8698e8b568 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -13,7 +13,6 @@ import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalOrder; -import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,7 +28,7 @@ public abstract class InternalTerms, B extends Int public static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound"); public static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); - public abstract static class Bucket> extends AbstractTermsBucket implements Terms.Bucket, KeyComparable { + public abstract static class Bucket> extends AbstractTermsBucket implements Terms.Bucket { /** * Reads a bucket. Should be a constructor reference. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java index ce911379d9ddb..e8d5050129e27 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -257,6 +258,11 @@ public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgrou return this; } + @Override + public QueryBuilder getQuery() { + return backgroundFilter != null ? backgroundFilter : QueryBuilders.matchAllQuery(); + } + /** * Set terms to include and exclude from the aggregation results */ diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index d91b6e8b4e4a3..80cc459569dea 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -643,6 +643,9 @@ static > BiFunction fromStoredFields() { return ctx -> new LeafFieldLookupProvider() { StoredFields storedFields; - int currentDoc = -1; - final List currentValues = new ArrayList<>(2); @Override public void populateFieldLookup(FieldLookup fieldLookup, int doc) throws IOException { if (storedFields == null) { storedFields = ctx.reader().storedFields(); } - if (doc == currentDoc) { - fieldLookup.setValues(currentValues); - } else { - currentDoc = doc; - currentValues.clear(); - // TODO can we remember which fields have been loaded here and get them eagerly next time? - // likelihood is if a script is loading several fields on one doc they will load the same - // set of fields next time round - SingleFieldsVisitor visitor = new SingleFieldsVisitor(fieldLookup.fieldType(), currentValues); - storedFields.document(doc, visitor); - fieldLookup.setValues(currentValues); - } + // TODO can we remember which fields have been loaded here and get them eagerly next time? 
+ // likelihood is if a script is loading several fields on one doc they will load the same + // set of fields next time round + final List currentValues = new ArrayList<>(2); + storedFields.document(doc, new SingleFieldsVisitor(fieldLookup.fieldType(), currentValues)); + fieldLookup.setValues(currentValues); } + }; } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java index 5f8e6a893c1b5..e83fa79c79460 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java @@ -15,20 +15,16 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; -import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public class SearchProfileDfsPhaseResult implements Writeable, ToXContentObject { private final ProfileResult dfsShardResult; @@ -63,24 +59,8 @@ public void writeTo(StreamOutput out) throws IOException { } } - private static final ParseField STATISTICS = new ParseField("statistics"); - private static final ParseField KNN = new ParseField("knn"); - private static final InstantiatingObjectParser PARSER; - - static { - InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( - "search_profile_dfs_phase_result", - true, - SearchProfileDfsPhaseResult.class - ); - parser.declareObject(optionalConstructorArg(), (p, c) -> ProfileResult.fromXContent(p), STATISTICS); - parser.declareObjectArray(optionalConstructorArg(), (p, c) -> QueryProfileShardResult.fromXContent(p), KNN); - PARSER = parser.build(); - } - - public static SearchProfileDfsPhaseResult fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } + public static final ParseField STATISTICS = new ParseField("statistics"); + public static final ParseField KNN = new ParseField("knn"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java index 1cc6810f8e575..cb15e9af8956a 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -15,36 +15,28 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; -import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import 
java.io.IOException; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Profile results for all shards. */ public final class SearchProfileResults implements Writeable, ToXContentFragment { private static final Logger logger = LogManager.getLogger(SearchProfileResults.class); - private static final String ID_FIELD = "id"; + public static final String ID_FIELD = "id"; private static final String NODE_ID_FIELD = "node_id"; private static final String CLUSTER_FIELD = "cluster"; private static final String INDEX_NAME_FIELD = "index"; private static final String SHARD_ID_FIELD = "shard_id"; - private static final String SHARDS_FIELD = "shards"; + public static final String SHARDS_FIELD = "shards"; public static final String PROFILE_FIELD = "profile"; // map key is the composite "id" of form [nodeId][(clusterName:)indexName][shardId] created from SearchShardTarget.toString @@ -117,75 +109,6 @@ public String toString() { return Strings.toString(this); } - public static SearchProfileResults fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - Map profileResults = new HashMap<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.START_ARRAY) { - if (SHARDS_FIELD.equals(parser.currentName())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parseProfileResultsEntry(parser, profileResults); - } - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - parser.skipChildren(); - } - } - return new SearchProfileResults(profileResults); - } - - private static void parseProfileResultsEntry(XContentParser parser, Map searchProfileResults) - throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = null; - List queryProfileResults = new ArrayList<>(); - AggregationProfileShardResult aggProfileShardResult = null; - ProfileResult fetchResult = null; - String id = null; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (ID_FIELD.equals(currentFieldName)) { - id = parser.text(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if ("searches".equals(currentFieldName)) { - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); - } - } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { - aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("dfs".equals(currentFieldName)) { - searchProfileDfsPhaseResult = SearchProfileDfsPhaseResult.fromXContent(parser); - } else if ("fetch".equals(currentFieldName)) { - fetchResult = ProfileResult.fromXContent(parser); - } else { - 
parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - SearchProfileShardResult result = new SearchProfileShardResult( - new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), - fetchResult - ); - result.getQueryPhase().setSearchProfileDfsPhaseResult(searchProfileDfsPhaseResult); - searchProfileResults.put(id, result); - } - /** * Parsed representation of a composite id used for shards in a profile. * The composite id format is specified/created via the {@code SearchShardTarget} method. diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index e72ef2d9b3ece..8aebde23d6a87 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -25,8 +24,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A container class to hold the profile results for a single shard in the request. * Contains a list of query profiles, a collector tree and a total rewrite tree. @@ -139,42 +136,4 @@ public int hashCode() { public String toString() { return Strings.toString(this); } - - public static QueryProfileShardResult fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - String currentFieldName = null; - List queryProfileResults = new ArrayList<>(); - long rewriteTime = 0; - Long vectorOperationsCount = null; - CollectorResult collector = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (REWRITE_TIME.equals(currentFieldName)) { - rewriteTime = parser.longValue(); - } else if (VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { - vectorOperationsCount = parser.longValue(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (QUERY_ARRAY.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(ProfileResult.fromXContent(parser)); - } - } else if (COLLECTOR.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - collector = CollectorResult.fromXContent(parser); - } - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); - } } diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java index 38363ee3e3fdd..c6ddd1964188f 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java +++ 
b/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java @@ -23,7 +23,7 @@ abstract class AbstractBooleanScriptFieldQuery extends AbstractScriptFieldQuery< } @Override - protected boolean matches(BooleanFieldScript scriptContext, int docId) { + protected final boolean matches(BooleanFieldScript scriptContext, int docId) { scriptContext.runForDoc(docId); return matches(scriptContext.trues(), scriptContext.falses()); } diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java index 500d00628bd19..722cff6fc0edf 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java @@ -22,7 +22,7 @@ abstract class AbstractDoubleScriptFieldQuery extends AbstractScriptFieldQuery values) { + protected TwoPhaseIterator createTwoPhaseIterator(StringFieldScript scriptContext, DocIdSetIterator approximation) { + BytesRefBuilder scratch = new BytesRefBuilder(); + return new TwoPhaseIterator(approximation) { + @Override + public boolean matches() { + scriptContext.runForDoc(approximation.docID()); + return AbstractStringScriptFieldAutomatonQuery.this.matches(scriptContext.getValues(), scratch); + } + + @Override + public float matchCost() { + return MATCH_COST; + } + }; + } + + protected final boolean matches(List values, BytesRefBuilder scratch) { for (String value : values) { scratch.copyChars(value); if (automaton.run(scratch.bytes(), 0, scratch.length())) { @@ -41,6 +58,11 @@ protected final boolean matches(List values) { return false; } + @Override + protected final boolean matches(List values) { + throw new UnsupportedOperationException(); + } + @Override public final void visit(QueryVisitor visitor) { if (visitor.acceptField(fieldName())) { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index f3371caf4c1a7..3f638bb7f9905 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -8,26 +8,20 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; 
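// Editor's note (illustrative sketch, not part of this patch): the AbstractStringScriptFieldAutomatonQuery
// hunk above adds a per-query TwoPhaseIterator so the expensive script only runs in matches(), after the
// cheap approximation has accepted a document, reusing a single BytesRefBuilder as scratch. The standalone
// Lucene-style sketch below shows the same pattern; ExpensiveCheckIterator and the IntPredicate stand-in
// are placeholder names, not identifiers from the patch.
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TwoPhaseIterator;

import java.util.function.IntPredicate;

final class ExpensiveCheckIterator extends TwoPhaseIterator {
    private final IntPredicate expensiveCheck; // stand-in for "run the script, then test the automaton"

    ExpensiveCheckIterator(DocIdSetIterator approximation, IntPredicate expensiveCheck) {
        super(approximation);
        this.expensiveCheck = expensiveCheck;
    }

    @Override
    public boolean matches() {
        // Called only for documents the approximation has already matched.
        return expensiveCheck.test(approximation.docID());
    }

    @Override
    public float matchCost() {
        // A high cost tells Lucene to apply cheaper two-phase checks before this one.
        return 9000f;
    }
}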
@@ -35,12 +29,9 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Top level suggest result, containing the result for each suggestion. */ @@ -125,29 +116,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller - */ - public static Suggest fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - List>> suggestions = new ArrayList<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String currentField = parser.currentName(); - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - Suggestion> suggestion = Suggestion.fromXContent(parser); - if (suggestion != null) { - suggestions.add(suggestion); - } else { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField) - ); - } - } - return new Suggest(suggestions); - } - public static List>> reduce(Map>> groupedSuggestions) { List>> reduced = new ArrayList<>(groupedSuggestions.size()); for (Map.Entry>> unmergedResults : groupedSuggestions.entrySet()) { @@ -362,33 +330,14 @@ public int hashCode() { return Objects.hash(name, size, entries); } - @SuppressWarnings("unchecked") - public static Suggestion> fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - SetOnce suggestion = new SetOnce<>(); - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class, suggestion::set); - return suggestion.get(); - } - - protected static > void parseEntries( - XContentParser parser, - Suggestion suggestion, - CheckedFunction entryParser - ) throws IOException { - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - suggestion.addTerm(entryParser.apply(parser)); - } - } - /** * Represents a part from the suggest text with suggested options. */ public abstract static class Entry implements Iterable, Writeable, ToXContentFragment { - private static final String TEXT = "text"; - private static final String OFFSET = "offset"; - private static final String LENGTH = "length"; + static final String TEXT = "text"; + static final String OFFSET = "offset"; + static final String LENGTH = "length"; protected static final String OPTIONS = "options"; protected Text text; @@ -561,12 +510,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - protected static void declareCommonFields(ObjectParser, Void> parser) { - parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT)); - parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET)); - parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH)); - } - /** * Contains the suggested text with its document frequency and score. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 7210c35d961ac..234ccda755f7b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -18,24 +18,17 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.search.SearchHit.unknownMetaFieldConsumer; import static org.elasticsearch.search.suggest.Suggest.COMPARATOR; /** @@ -114,12 +107,6 @@ public int hashCode() { return Objects.hash(super.hashCode(), skipDuplicates); } - public static CompletionSuggestion fromXContent(XContentParser parser, String name) throws IOException { - CompletionSuggestion suggestion = new CompletionSuggestion(name, -1, false); - parseEntries(parser, suggestion, CompletionSuggestion.Entry::fromXContent); - return suggestion; - } - private static final class OptionPriorityQueue extends PriorityQueue { OptionPriorityQueue(int maxSize) { super(maxSize); @@ -230,7 +217,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - private Entry() {} + public Entry() {} public Entry(StreamInput in) throws IOException { super(in); @@ -241,20 +228,6 @@ protected Option newOption(StreamInput in) throws IOException { return new Option(in); } - private static final ObjectParser PARSER = new ObjectParser<>("CompletionSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
- * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - public static class Option extends Suggest.Suggestion.Entry.Option { private final Map> contexts; private final ScoreDoc doc; @@ -336,69 +309,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static final ObjectParser, Void> PARSER = new ObjectParser<>( - "CompletionOptionParser", - unknownMetaFieldConsumer, - HashMap::new - ); - - static { - SearchHit.declareInnerHitsParseFields(PARSER); - PARSER.declareString( - (map, value) -> map.put(Suggestion.Entry.Option.TEXT.getPreferredName(), value), - Suggestion.Entry.Option.TEXT - ); - PARSER.declareFloat( - (map, value) -> map.put(Suggestion.Entry.Option.SCORE.getPreferredName(), value), - Suggestion.Entry.Option.SCORE - ); - PARSER.declareObject( - (map, value) -> map.put(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName(), value), - (p, c) -> parseContexts(p), - CompletionSuggestion.Entry.Option.CONTEXTS - ); - } - - private static Map> parseContexts(XContentParser parser) throws IOException { - Map> contexts = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String key = parser.currentName(); - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - Set values = new HashSet<>(); - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); - values.add(parser.text()); - } - contexts.put(key, values); - } - return contexts; - } - - public static Option fromXContent(XContentParser parser) { - Map values = PARSER.apply(parser, null); - - Text text = new Text((String) values.get(Suggestion.Entry.Option.TEXT.getPreferredName())); - Float score = (Float) values.get(Suggestion.Entry.Option.SCORE.getPreferredName()); - @SuppressWarnings("unchecked") - Map> contexts = (Map>) values.get( - CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName() - ); - if (contexts == null) { - contexts = Collections.emptyMap(); - } - - SearchHit hit = null; - // the option either prints SCORE or inlines the search hit - if (score == null) { - hit = SearchHit.createFromMap(values); - score = hit.getScore(); - } - CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(-1, text, score, contexts); - option.setHit(hit); - return option; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java index 1c881a9887583..e92f6bd8d52cd 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java @@ -13,17 +13,10 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import 
org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Suggestion entry returned from the {@link PhraseSuggester}. */ @@ -47,12 +40,6 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } - public static PhraseSuggestion fromXContent(XContentParser parser, String name) throws IOException { - PhraseSuggestion suggestion = new PhraseSuggestion(name, -1); - parseEntries(parser, suggestion, PhraseSuggestion.Entry::fromXContent); - return suggestion; - } - public static class Entry extends Suggestion.Entry { protected double cutoffScore = Double.MIN_VALUE; @@ -66,7 +53,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - Entry() {} + public Entry() {} public Entry(StreamInput in) throws IOException { super(in); @@ -94,20 +81,6 @@ public void addOption(Option option) { } } - private static final ObjectParser PARSER = new ObjectParser<>("PhraseSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. - * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override protected Option newOption(StreamInput in) throws IOException { return new Option(in); @@ -142,30 +115,6 @@ public Option(Text text, Text highlighted, float score) { public Option(StreamInput in) throws IOException { super(in); } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "PhraseOptionParser", - true, - args -> { - Text text = new Text((String) args[0]); - float score = (Float) args[1]; - String highlighted = (String) args[2]; - Text highlightedText = highlighted == null ? 
null : new Text(highlighted); - Boolean collateMatch = (Boolean) args[3]; - return new Option(text, highlightedText, score, collateMatch); - } - ); - - static { - PARSER.declareString(constructorArg(), TEXT); - PARSER.declareFloat(constructorArg(), SCORE); - PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); - PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); - } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java index ce55385dfa550..eb4edcf0a5aa4 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java @@ -14,18 +14,13 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Comparator; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * The suggestion responses corresponding with the suggestions in the request. */ @@ -104,13 +99,6 @@ public String getWriteableName() { return TermSuggestionBuilder.SUGGESTION_NAME; } - public static TermSuggestion fromXContent(XContentParser parser, String name) throws IOException { - // the "size" parameter and the SortBy for TermSuggestion cannot be parsed from the response, use default values - TermSuggestion suggestion = new TermSuggestion(name, -1, SortBy.SCORE); - parseEntries(parser, suggestion, TermSuggestion.Entry::fromXContent); - return suggestion; - } - @Override protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); @@ -135,7 +123,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - private Entry() {} + public Entry() {} public Entry(StreamInput in) throws IOException { super(in); @@ -146,20 +134,6 @@ protected Option newOption(StreamInput in) throws IOException { return new Option(in); } - private static final ObjectParser PARSER = new ObjectParser<>("TermSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. - * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** * Contains the suggested text with its document frequency and score. 
*/ @@ -204,27 +178,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(FREQ.getPreferredName(), freq); return builder; } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "TermSuggestionOptionParser", - true, - args -> { - Text text = new Text((String) args[0]); - int freq = (Integer) args[1]; - float score = (Float) args[2]; - return new Option(text, freq, score); - } - ); - - static { - PARSER.declareString(constructorArg(), Suggestion.Entry.Option.TEXT); - PARSER.declareInt(constructorArg(), FREQ); - PARSER.declareFloat(constructorArg(), Suggestion.Entry.Option.SCORE); - } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 243df88cfab00..8a1f68c867943 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -23,7 +23,6 @@ import org.elasticsearch.repositories.RepositoryShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentFragment; @@ -53,237 +52,40 @@ public final class SnapshotInfo implements Comparable, ToXContentF public static final String INCLUDE_REPOSITORY_XCONTENT_PARAM = "include_repository"; private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time"); - private static final String SNAPSHOT = "snapshot"; - private static final String UUID = "uuid"; - private static final String REPOSITORY = "repository"; - private static final String INDICES = "indices"; - private static final String DATA_STREAMS = "data_streams"; - private static final String STATE = "state"; - private static final String REASON = "reason"; - private static final String START_TIME = "start_time"; - private static final String START_TIME_IN_MILLIS = "start_time_in_millis"; - private static final String END_TIME = "end_time"; - private static final String END_TIME_IN_MILLIS = "end_time_in_millis"; - private static final String DURATION = "duration"; - private static final String DURATION_IN_MILLIS = "duration_in_millis"; - private static final String FAILURES = "failures"; - private static final String SHARDS = "shards"; - private static final String TOTAL = "total"; - private static final String FAILED = "failed"; - private static final String SUCCESSFUL = "successful"; - private static final String VERSION_ID = "version_id"; - private static final String VERSION = "version"; - private static final String NAME = "name"; - private static final String TOTAL_SHARDS = "total_shards"; - private static final String SUCCESSFUL_SHARDS = "successful_shards"; - private static final String INCLUDE_GLOBAL_STATE = "include_global_state"; - private static final String USER_METADATA = "metadata"; - private static final String FEATURE_STATES = "feature_states"; - private static final String INDEX_DETAILS = "index_details"; - - private static final String UNKNOWN_REPO_NAME = "_na_"; + + static final String SNAPSHOT = "snapshot"; + static final String UUID = "uuid"; + static final String REPOSITORY = "repository"; + static final String INDICES = 
"indices"; + static final String DATA_STREAMS = "data_streams"; + static final String STATE = "state"; + static final String REASON = "reason"; + static final String START_TIME = "start_time"; + static final String START_TIME_IN_MILLIS = "start_time_in_millis"; + static final String END_TIME = "end_time"; + static final String END_TIME_IN_MILLIS = "end_time_in_millis"; + static final String DURATION = "duration"; + static final String DURATION_IN_MILLIS = "duration_in_millis"; + static final String FAILURES = "failures"; + static final String SHARDS = "shards"; + static final String TOTAL = "total"; + static final String FAILED = "failed"; + static final String SUCCESSFUL = "successful"; + static final String VERSION_ID = "version_id"; + static final String VERSION = "version"; + static final String NAME = "name"; + static final String TOTAL_SHARDS = "total_shards"; + static final String SUCCESSFUL_SHARDS = "successful_shards"; + static final String INCLUDE_GLOBAL_STATE = "include_global_state"; + static final String USER_METADATA = "metadata"; + static final String FEATURE_STATES = "feature_states"; + static final String INDEX_DETAILS = "index_details"; + + static final String UNKNOWN_REPO_NAME = "_na_"; private static final Comparator COMPARATOR = Comparator.comparing(SnapshotInfo::startTime) .thenComparing(SnapshotInfo::snapshotId); - public static final class SnapshotInfoBuilder { - private String snapshotName = null; - private String snapshotUUID = null; - private String repository = UNKNOWN_REPO_NAME; - private String state = null; - private String reason = null; - private List indices = null; - private List dataStreams = null; - private List featureStates = null; - private Map indexSnapshotDetails = null; - private long startTime = 0L; - private long endTime = 0L; - private ShardStatsBuilder shardStatsBuilder = null; - private Boolean includeGlobalState = null; - private Map userMetadata = null; - private int version = -1; - private List shardFailures = null; - - private void setSnapshotName(String snapshotName) { - this.snapshotName = snapshotName; - } - - private void setSnapshotUUID(String snapshotUUID) { - this.snapshotUUID = snapshotUUID; - } - - private void setRepository(String repository) { - this.repository = repository; - } - - private void setState(String state) { - this.state = state; - } - - private void setReason(String reason) { - this.reason = reason; - } - - private void setIndices(List indices) { - this.indices = indices; - } - - private void setDataStreams(List dataStreams) { - this.dataStreams = dataStreams; - } - - private void setFeatureStates(List featureStates) { - this.featureStates = featureStates; - } - - private void setIndexSnapshotDetails(Map indexSnapshotDetails) { - this.indexSnapshotDetails = indexSnapshotDetails; - } - - private void setStartTime(long startTime) { - this.startTime = startTime; - } - - private void setEndTime(long endTime) { - this.endTime = endTime; - } - - private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { - this.shardStatsBuilder = shardStatsBuilder; - } - - private void setIncludeGlobalState(Boolean includeGlobalState) { - this.includeGlobalState = includeGlobalState; - } - - private void setUserMetadata(Map userMetadata) { - this.userMetadata = userMetadata; - } - - private void setVersion(int version) { - this.version = version; - } - - private void setShardFailures(List shardFailures) { - this.shardFailures = shardFailures; - } - - public SnapshotInfo build() { - final Snapshot snapshot = new 
Snapshot(repository, new SnapshotId(snapshotName, snapshotUUID)); - - if (indices == null) { - indices = Collections.emptyList(); - } - - if (dataStreams == null) { - dataStreams = Collections.emptyList(); - } - - if (featureStates == null) { - featureStates = Collections.emptyList(); - } - - if (indexSnapshotDetails == null) { - indexSnapshotDetails = Collections.emptyMap(); - } - - SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); - IndexVersion version = this.version == -1 ? IndexVersion.current() : IndexVersion.fromId(this.version); - - int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getTotalShards(); - int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getSuccessfulShards(); - - if (shardFailures == null) { - shardFailures = new ArrayList<>(); - } - - return new SnapshotInfo( - snapshot, - indices, - dataStreams, - featureStates, - reason, - version, - startTime, - endTime, - totalShards, - successfulShards, - shardFailures, - includeGlobalState, - userMetadata, - snapshotState, - indexSnapshotDetails - ); - } - } - - private static final class ShardStatsBuilder { - private int totalShards; - private int successfulShards; - - private void setTotalShards(int totalShards) { - this.totalShards = totalShards; - } - - int getTotalShards() { - return totalShards; - } - - private void setSuccessfulShards(int successfulShards) { - this.successfulShards = successfulShards; - } - - int getSuccessfulShards() { - return successfulShards; - } - } - - public static final ObjectParser SNAPSHOT_INFO_PARSER = new ObjectParser<>( - SnapshotInfoBuilder.class.getName(), - true, - SnapshotInfoBuilder::new - ); - - private static final ObjectParser SHARD_STATS_PARSER = new ObjectParser<>( - ShardStatsBuilder.class.getName(), - true, - ShardStatsBuilder::new - ); - - static { - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setRepository, new ParseField(REPOSITORY)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); - SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); - SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setDataStreams, new ParseField(DATA_STREAMS)); - SNAPSHOT_INFO_PARSER.declareObjectArray( - SnapshotInfoBuilder::setFeatureStates, - SnapshotFeatureInfo.SNAPSHOT_FEATURE_INFO_PARSER, - new ParseField(FEATURE_STATES) - ); - SNAPSHOT_INFO_PARSER.declareObject( - SnapshotInfoBuilder::setIndexSnapshotDetails, - (p, c) -> p.map(HashMap::new, p2 -> IndexSnapshotDetails.PARSER.parse(p2, c)), - new ParseField(INDEX_DETAILS) - ); - SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); - SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); - SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); - SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); - SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setUserMetadata, (p, c) -> p.map(), new ParseField(USER_METADATA)); - 
SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); - SNAPSHOT_INFO_PARSER.declareObjectArray( - SnapshotInfoBuilder::setShardFailures, - SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, - new ParseField(FAILURES) - ); - - SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL)); - SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL)); - } - private final Snapshot snapshot; @Nullable diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 438a32a2b8630..33f3f6d631455 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -107,7 +107,7 @@ static boolean awaitTermination( * not be interrupted. */ default Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, Executor executor) { - var runnable = new ReschedulingRunnable(command, interval, executor, this, (e) -> {}, (e) -> {}); + var runnable = new ReschedulingRunnable(command, interval, executor, this, e -> {}, e -> {}); runnable.start(); return runnable; } @@ -226,13 +226,25 @@ public void doRun() { @Override public void onFailure(Exception e) { - failureConsumer.accept(e); + try { + if (runnable instanceof AbstractRunnable abstractRunnable) { + abstractRunnable.onFailure(e); + } + } finally { + failureConsumer.accept(e); + } } @Override public void onRejection(Exception e) { run = false; - rejectionConsumer.accept(e); + try { + if (runnable instanceof AbstractRunnable abstractRunnable) { + abstractRunnable.onRejection(e); + } + } finally { + rejectionConsumer.accept(e); + } } @Override @@ -247,6 +259,11 @@ public void onAfter() { } } + @Override + public boolean isForceExecution() { + return runnable instanceof AbstractRunnable abstractRunnable && abstractRunnable.isForceExecution(); + } + @Override public String toString() { return "ReschedulingRunnable{" + "runnable=" + runnable + ", interval=" + interval + '}'; diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 97c9ce755c130..cf554fe81d4a3 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -597,11 +597,14 @@ public void scheduleUnlessShuttingDown(TimeValue delay, Executor executor, Runna } public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, Executor executor) { - var runnable = new ReschedulingRunnable(command, interval, executor, this, (e) -> { - if (logger.isDebugEnabled()) { - logger.debug(() -> format("scheduled task [%s] was rejected on thread pool [%s]", command, executor), e); - } - }, (e) -> logger.warn(() -> format("failed to run scheduled task [%s] on thread pool [%s]", command, executor), e)); + var runnable = new ReschedulingRunnable( + command, + interval, + executor, + this, + e -> logger.debug(() -> format("scheduled task [%s] was rejected on thread pool [%s]", command, executor), e), + e -> logger.warn(() -> format("failed to run scheduled task [%s] on thread pool [%s]", command, executor), e) + ); runnable.start(); return runnable; } diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index b392111557615..0a1480526c9f0 
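// Editor's note (illustrative sketch, not part of this patch): the Scheduler.ReschedulingRunnable hunks
// above forward onFailure/onRejection to the wrapped task when it is an AbstractRunnable (before invoking
// the scheduler-level consumers) and delegate isForceExecution to it. The plain-Java sketch below shows
// that forwarding pattern; LifecycleAwareRunnable and ForwardingTask are placeholder names, not
// Elasticsearch types, and a single failure consumer stands in for the separate failure/rejection
// consumers used in the patch.
import java.util.function.Consumer;

interface LifecycleAwareRunnable extends Runnable {
    default void onFailure(Exception e) {}
    default void onRejection(Exception e) {}
    default boolean isForceExecution() { return false; }
}

final class ForwardingTask implements LifecycleAwareRunnable {
    private final Runnable delegate;
    private final Consumer<Exception> failureConsumer;

    ForwardingTask(Runnable delegate, Consumer<Exception> failureConsumer) {
        this.delegate = delegate;
        this.failureConsumer = failureConsumer;
    }

    @Override
    public void run() {
        delegate.run();
    }

    @Override
    public void onFailure(Exception e) {
        try {
            if (delegate instanceof LifecycleAwareRunnable aware) {
                aware.onFailure(e); // let the task react first
            }
        } finally {
            failureConsumer.accept(e); // then the scheduler-level consumer (e.g. logging)
        }
    }

    @Override
    public void onRejection(Exception e) {
        try {
            if (delegate instanceof LifecycleAwareRunnable aware) {
                aware.onRejection(e);
            }
        } finally {
            failureConsumer.accept(e);
        }
    }

    @Override
    public boolean isForceExecution() {
        return delegate instanceof LifecycleAwareRunnable aware && aware.isForceExecution();
    }
}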
100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -66,6 +66,7 @@ 7.17.16,7171699 7.17.17,7171799 7.17.18,7171899 +7.17.19,7171999 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 @@ -112,3 +113,4 @@ 8.12.0,8560000 8.12.1,8560001 8.12.2,8560001 +8.13.0,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index f2da9fcaf60ce..f66cda3c08fc7 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -66,6 +66,7 @@ 7.17.16,7171699 7.17.17,7171799 7.17.18,7171899 +7.17.19,7171999 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 @@ -112,3 +113,4 @@ 8.12.0,8500008 8.12.1,8500010 8.12.2,8500010 +8.13.0,8503000 diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index b8b8380ee4a96..2de973622248b 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -159,6 +159,18 @@ public void testMax() { } } + public void testIsPatchFrom() { + TransportVersion patchVersion = TransportVersion.fromId(8_800_00_4); + assertThat(TransportVersion.fromId(8_799_00_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_799_00_9).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_3).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_4).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_00_9).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_01_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_801_00_0).isPatchFrom(patchVersion), is(false)); + } + public void testVersionConstantPresent() { Set ignore = Set.of(TransportVersions.ZERO, TransportVersion.current(), TransportVersions.MINIMUM_COMPATIBLE); assertThat(TransportVersion.current(), sameInstance(TransportVersion.fromId(TransportVersion.current().id()))); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index d4231c9f7538b..11655a93097cc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -110,7 +110,7 @@ public class ClusterHealthResponsesTests extends AbstractXContentSerializingTest private static final ObjectParser.NamedObjectParser INDEX_PARSER = ( XContentParser parser, Void context, - String index) -> ClusterIndexHealth.innerFromXContent(parser, index); + String index) -> ClusterIndexHealthTests.parseInstance(parser, index); static { // ClusterStateHealth fields diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java index 8f5712d90487f..bf90d962912c5 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.action.admin.cluster.repositories.verify; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.util.ArrayList; @@ -15,9 +17,28 @@ public class VerifyRepositoryResponseTests extends AbstractXContentTestCase { + private static final ObjectParser PARSER = new ObjectParser<>( + VerifyRepositoryResponse.class.getName(), + true, + VerifyRepositoryResponse::new + ); + static { + ObjectParser internalParser = new ObjectParser<>( + VerifyRepositoryResponse.NODES, + true, + null + ); + internalParser.declareString(VerifyRepositoryResponse.NodeView::setName, new ParseField(VerifyRepositoryResponse.NAME)); + PARSER.declareNamedObjects( + VerifyRepositoryResponse::setNodes, + (p, v, name) -> internalParser.parse(p, new VerifyRepositoryResponse.NodeView(name), null), + new ParseField("nodes") + ); + } + @Override protected VerifyRepositoryResponse doParseInstance(XContentParser parser) { - return VerifyRepositoryResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java index 3f5692c30cfef..3d46994faacf7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java @@ -14,17 +14,32 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.util.List; import java.util.Set; import java.util.function.Predicate; +import static org.elasticsearch.action.support.master.AcknowledgedResponse.declareAcknowledgedField; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ClusterUpdateSettingsResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_update_settings_response", + true, + args -> new ClusterUpdateSettingsResponse((boolean) args[0], (Settings) args[1], (Settings) args[2]) + ); + static { + declareAcknowledgedField(PARSER); + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), ClusterUpdateSettingsResponse.TRANSIENT); + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), ClusterUpdateSettingsResponse.PERSISTENT); + } + @Override protected ClusterUpdateSettingsResponse doParseInstance(XContentParser parser) { - return ClusterUpdateSettingsResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java index cb3c9a3557d61..87cb67a53fc37 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotInfoTestUtils; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -30,7 +31,7 @@ public class CreateSnapshotResponseTests extends AbstractXContentTestCase { + static final ObjectParser.NamedObjectParser PARSER; + static { + ConstructingObjectParser innerParser = new ConstructingObjectParser<>( + "snapshot_index_status", + true, + (Object[] parsedObjects, String index) -> { + int i = 0; + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") + List shardStatuses = (List) parsedObjects[i]; + + final Map indexShards; + if (shardStatuses == null || shardStatuses.isEmpty()) { + indexShards = emptyMap(); + } else { + indexShards = Maps.newMapWithExpectedSize(shardStatuses.size()); + for (SnapshotIndexShardStatus shardStatus : shardStatuses) { + indexShards.put(shardStatus.getShardId().getId(), shardStatus); + } + } + return new SnapshotIndexStatus(index, indexShards, shardsStats, stats); + } + ); + innerParser.declareObject( + constructorArg(), + (p, c) -> SnapshotShardsStatsTests.PARSER.apply(p, null), + new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS) + ); + innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS)); + innerParser.declareNamedObjects( + constructorArg(), + SnapshotIndexShardStatus.PARSER, + new ParseField(SnapshotIndexStatus.Fields.SHARDS) + ); + PARSER = ((p, c, name) -> innerParser.apply(p, name)); + } + @Override protected SnapshotIndexStatus createTestInstance() { String index = randomAlphaOfLength(10); @@ -40,7 +86,8 @@ protected Predicate getRandomFieldsExcludeFilter() { protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + SnapshotIndexStatus status = PARSER.parse(parser, null, parser.currentName()); XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); return status; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java index 9d4b8d601c63b..a9eacb49798f9 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java @@ -9,12 +9,39 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SnapshotShardsStatsTests extends AbstractXContentTestCase { + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + SnapshotShardsStats.Fields.SHARDS_STATS, + true, + (Object[] parsedObjects) -> { + int i = 0; + int initializingShards = (int) parsedObjects[i++]; + int startedShards = (int) parsedObjects[i++]; + int finalizingShards = (int) parsedObjects[i++]; + int doneShards = (int) parsedObjects[i++]; + int failedShards = (int) parsedObjects[i++]; + int totalShards = (int) parsedObjects[i]; + return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); + } + ); + static { + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.INITIALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.STARTED)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.FINALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.DONE)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.FAILED)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.TOTAL)); + } + @Override protected SnapshotShardsStats createTestInstance() { int initializingShards = randomInt(); @@ -28,7 +55,7 @@ protected SnapshotShardsStats createTestInstance() { @Override protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException { - return SnapshotShardsStats.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java index 9c28930f12382..a32a66a55454f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java @@ -11,20 +11,79 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Strings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.function.Predicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class SnapshotStatusTests extends AbstractChunkedSerializingTestCase { + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshot_status", + true, + (Object[] parsedObjects) -> { + int i = 0; + String name = (String) parsedObjects[i++]; + String repository = (String) parsedObjects[i++]; + String uuid = (String) parsedObjects[i++]; + String rawState = (String) parsedObjects[i++]; + Boolean includeGlobalState = (Boolean) parsedObjects[i++]; + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") + List indices = ((List) parsedObjects[i]); + + Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid)); + SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState); + Map indicesStatus; + List shards; + if (indices == null || indices.isEmpty()) { + indicesStatus = emptyMap(); + shards = emptyList(); + } else { + indicesStatus = Maps.newMapWithExpectedSize(indices.size()); + shards = new ArrayList<>(); + for (SnapshotIndexStatus index : indices) { + indicesStatus.put(index.getIndex(), index); + shards.addAll(index.getShards().values()); + } + } + return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState); + } + ); + static { + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.SNAPSHOT)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.REPOSITORY)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.UUID)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.STATE)); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField(SnapshotStatus.INCLUDE_GLOBAL_STATE)); + PARSER.declareField( + constructorArg(), + SnapshotStats::fromXContent, + new ParseField(SnapshotStats.Fields.STATS), + ObjectParser.ValueType.OBJECT + ); + PARSER.declareObject(constructorArg(), SnapshotShardsStatsTests.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); + PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatusTests.PARSER, new ParseField(SnapshotStatus.INDICES)); + } + public void testToString() throws Exception { SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values()); String uuid = UUIDs.randomBase64UUID(); @@ -180,7 +239,7 @@ protected Predicate getRandomFieldsExcludeFilter() { @Override protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException { - return SnapshotStatus.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java index f90b37f75fa41..6b921419c0fd4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import 
org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -17,11 +19,26 @@ import java.util.List; import java.util.function.Predicate; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SnapshotsStatusResponseTests extends AbstractChunkedSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshots_status_response", + true, + (Object[] parsedObjects) -> { + @SuppressWarnings("unchecked") + List snapshots = (List) parsedObjects[0]; + return new SnapshotsStatusResponse(snapshots); + } + ); + static { + PARSER.declareObjectArray(constructorArg(), SnapshotStatusTests.PARSER, new ParseField("snapshots")); + } + @Override protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException { - return SnapshotsStatusResponse.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java index efb1e61e19fa2..41faaf3517e76 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java @@ -8,14 +8,38 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.script.ScriptContextInfo; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; public class GetScriptContextResponseTests extends AbstractXContentSerializingTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_script_context", + true, + (a) -> { + Map contexts = ((List) a[0]).stream() + .collect(Collectors.toMap(ScriptContextInfo::getName, c -> c)); + return new GetScriptContextResponse(contexts); + } + ); + + static { + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + ScriptContextInfo.PARSER::apply, + GetScriptContextResponse.CONTEXTS + ); + } + @Override protected GetScriptContextResponse createTestInstance() { if (randomBoolean()) { @@ -31,7 +55,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetScriptContextResponse doParseInstance(XContentParser parser) throws IOException { - return GetScriptContextResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java index f8d3871fbfa8f..ec56a57aa3a90 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Tuple; import org.elasticsearch.script.ScriptLanguagesInfo; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -22,8 +24,33 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class GetScriptLanguageResponseTests extends AbstractXContentSerializingTestCase { + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "script_languages_info", + true, + (a) -> new ScriptLanguagesInfo( + new HashSet<>((List) a[0]), + ((List>>) a[1]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) + ) + ); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser>, Void> LANGUAGE_CONTEXT_PARSER = + new ConstructingObjectParser<>("language_contexts", true, (m, name) -> new Tuple<>((String) m[0], Set.copyOf((List) m[1]))); + + static { + PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.TYPES_ALLOWED); + PARSER.declareObjectArray(constructorArg(), LANGUAGE_CONTEXT_PARSER, ScriptLanguagesInfo.LANGUAGE_CONTEXTS); + LANGUAGE_CONTEXT_PARSER.declareString(constructorArg(), ScriptLanguagesInfo.LANGUAGE); + LANGUAGE_CONTEXT_PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.CONTEXTS); + } + private static int MAX_VALUES = 4; private static final int MIN_LENGTH = 1; private static final int MAX_LENGTH = 16; @@ -38,7 +65,7 @@ protected GetScriptLanguageResponse createTestInstance() { @Override protected GetScriptLanguageResponse doParseInstance(XContentParser parser) throws IOException { - return new GetScriptLanguageResponse(ScriptLanguagesInfo.fromXContent(parser)); + return new GetScriptLanguageResponse(PARSER.parse(parser, null)); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java index a6524932dd775..8cf8a1c064004 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java @@ -8,10 +8,12 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -23,8 +25,51 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "reload_analyzer", + true, + arg -> { + BaseBroadcastResponse response = 
(BaseBroadcastResponse) arg[0]; + List results = (List) arg[1]; + Map reloadedNodeIds = new HashMap<>(); + for (ReloadAnalyzersResponse.ReloadDetails result : results) { + reloadedNodeIds.put(result.getIndexName(), result); + } + return new ReloadAnalyzersResponse( + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()), + reloadedNodeIds + ); + } + ); + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser ENTRY_PARSER = + new ConstructingObjectParser<>( + "reload_analyzer.entry", + true, + arg -> new ReloadAnalyzersResponse.ReloadDetails( + (String) arg[0], + new HashSet<>((List) arg[1]), + new HashSet<>((List) arg[2]) + ) + ); + + static { + declareBroadcastFields(PARSER); + PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, ReloadAnalyzersResponse.RELOAD_DETAILS_FIELD); + ENTRY_PARSER.declareString(constructorArg(), ReloadAnalyzersResponse.INDEX_FIELD); + ENTRY_PARSER.declareStringArray(constructorArg(), ReloadAnalyzersResponse.RELOADED_ANALYZERS_FIELD); + ENTRY_PARSER.declareStringArray(constructorArg(), ReloadAnalyzersResponse.RELOADED_NODE_IDS_FIELD); + } + @Override protected ReloadAnalyzersResponse createTestInstance( int totalShards, @@ -50,7 +95,7 @@ public static Map createRandomRel @Override protected ReloadAnalyzersResponse doParseInstance(XContentParser parser) throws IOException { - return ReloadAnalyzersResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java index 962304ef8aadc..a8d8980e6358c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java @@ -10,13 +10,26 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; +import static org.elasticsearch.action.support.master.ShardsAcknowledgedResponse.declareAcknowledgedAndShardsAcknowledgedFields; + public class OpenIndexResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "open_index", + true, + args -> new OpenIndexResponse((boolean) args[0], (boolean) args[1]) + ); + + static { + declareAcknowledgedAndShardsAcknowledgedFields(PARSER); + } + @Override protected OpenIndexResponse doParseInstance(XContentParser parser) { - return OpenIndexResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 814cff37e0708..db156f983220e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -552,6 +552,47 @@ public void testLazyRolloverFails() throws Exception { } } + public void testRolloverAliasToDataStreamFails() throws Exception { + final IndexMetadata 
backingIndexMetadata = IndexMetadata.builder(".ds-logs-ds-000001") + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + final DataStream dataStream = new DataStream( + "logs-ds", + List.of(backingIndexMetadata.getIndex()), + 1, + Map.of(), + false, + false, + false, + false, + IndexMode.STANDARD + ); + Metadata.Builder metadataBuilder = Metadata.builder().put(backingIndexMetadata, false).put(dataStream); + metadataBuilder.put("ds-alias", dataStream.getName(), true, null); + final ClusterState stateBefore = ClusterState.builder(ClusterName.DEFAULT).metadata(metadataBuilder).build(); + + final TransportRolloverAction transportRolloverAction = new TransportRolloverAction( + mock(TransportService.class), + mockClusterService, + mockThreadPool, + mockActionFilters, + mockIndexNameExpressionResolver, + rolloverService, + mockClient, + mockAllocationService, + mockMetadataDataStreamService, + dataStreamAutoShardingService + ); + + final PlainActionFuture future = new PlainActionFuture<>(); + RolloverRequest rolloverRequest = new RolloverRequest("ds-alias", null); + transportRolloverAction.masterOperation(mock(CancellableTask.class), rolloverRequest, stateBefore, future); + IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, future::actionGet); + assertThat(illegalStateException.getMessage(), containsString("Aliases to data streams cannot be rolled over.")); + } + private IndicesStatsResponse createIndicesStatResponse(String indexName, long totalDocs, long primariesDocs) { final CommonStats primaryStats = mock(CommonStats.class); when(primaryStats.getDocs()).thenReturn(new DocsStats(primariesDocs, 0, between(1, 10000))); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java index 3b24f90b9d854..025f51b7df997 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java @@ -8,15 +8,34 @@ package org.elasticsearch.action.admin.indices.template.get; +import org.elasticsearch.action.admin.indices.rollover.RolloverConfigurationTests; +import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComponentTemplateTests; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionTests; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; -import java.util.Collections; +import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomAliases; +import static 
org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomMappings; +import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomSettings; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + public class GetComponentTemplateResponseTests extends AbstractWireSerializingTestCase { @Override protected Writeable.Reader instanceReader() { @@ -25,18 +44,81 @@ protected Writeable.Reader instanceReader() @Override protected GetComponentTemplateAction.Response createTestInstance() { + return new GetComponentTemplateAction.Response( + randomBoolean() ? Map.of() : randomTemplates(), + RolloverConfigurationTests.randomRolloverConditions(), + DataStreamGlobalRetentionTests.randomGlobalRetention() + ); + } + + @Override + protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) { + var templates = instance.getComponentTemplates(); + var rolloverConditions = instance.getRolloverConfiguration(); + var globalRetention = instance.getGlobalRetention(); + switch (randomInt(2)) { + case 0 -> templates = templates == null ? randomTemplates() : null; + case 1 -> rolloverConditions = randomValueOtherThan(rolloverConditions, RolloverConfigurationTests::randomRolloverConditions); + case 2 -> globalRetention = randomValueOtherThan(globalRetention, DataStreamGlobalRetentionTests::randomGlobalRetention); + } + return new GetComponentTemplateAction.Response(templates, rolloverConditions, globalRetention); + } + + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { + Settings settings = null; + CompressedXContent mappings = null; + Map aliases = null; + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); if (randomBoolean()) { - return new GetComponentTemplateAction.Response(Collections.emptyMap()); + settings = randomSettings(); } - Map templates = new HashMap<>(); - for (int i = 0; i < randomIntBetween(1, 4); i++) { - templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance()); + if (randomBoolean()) { + mappings = randomMappings(); + } + if (randomBoolean()) { + aliases = randomAliases(); + } + + var template = new ComponentTemplate( + new Template(settings, mappings, aliases, lifecycle), + randomBoolean() ? 
null : randomNonNegativeLong(), + null, + false + ); + var globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); + var rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); + var response = new GetComponentTemplateAction.Response( + Map.of(randomAlphaOfLength(10), template), + rolloverConfiguration, + globalRetention + ); + + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + String serialized = Strings.toString(builder); + assertThat(serialized, containsString("rollover")); + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) + .getConditions() + .keySet()) { + assertThat(serialized, containsString(label)); + } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } - return new GetComponentTemplateAction.Response(templates); } @Override - protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) { - return randomValueOtherThan(instance, this::createTestInstance); + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(IndicesModule.getNamedWriteables()); + } + + private static Map randomTemplates() { + Map templates = new HashMap<>(); + for (int i = 0; i < randomIntBetween(1, 4); i++) { + templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance()); + } + return templates; } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java index 6bbb2884f1bf3..5df0fa27f1016 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -10,19 +10,52 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class ValidateQueryResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "validate_query", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new ValidateQueryResponse( + (boolean) arg[1], + (List) arg[2], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + 
Arrays.asList(response.getShardFailures()) + ); + } + ); + static { + declareBroadcastFields(PARSER); + PARSER.declareBoolean(constructorArg(), new ParseField(ValidateQueryResponse.VALID_FIELD)); + PARSER.declareObjectArray( + optionalConstructorArg(), + QueryExplanation.PARSER, + new ParseField(ValidateQueryResponse.EXPLANATIONS_FIELD) + ); + } + private static ValidateQueryResponse createRandomValidateQueryResponse( int totalShards, int successfulShards, @@ -60,7 +93,7 @@ private static ValidateQueryResponse createRandomValidateQueryResponse() { @Override protected ValidateQueryResponse doParseInstance(XContentParser parser) throws IOException { - return ValidateQueryResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 76b1fa0011540..7c50ba3beae76 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -17,12 +17,15 @@ import org.elasticsearch.action.delete.DeleteResponseTests; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponseTests; +import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.action.update.UpdateResponseTests; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -43,6 +46,54 @@ public class BulkItemResponseTests extends ESTestCase { + /** + * Parse the output of the {@link DocWriteResponse#innerToXContent(XContentBuilder, ToXContent.Params)} method. + * + * This method is intended to be called by subclasses and must be called multiple times to parse all the information concerning + * {@link DocWriteResponse} objects. It always parses the current token, updates the given parsing context accordingly + * if needed and then immediately returns. + */ + public static void parseInnerToXContent(XContentParser parser, DocWriteResponse.Builder context) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + + if (token.isValue()) { + if (DocWriteResponse._INDEX.equals(currentFieldName)) { + // index uuid and shard id are unknown and can't be parsed back for now. 
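+ // only the index name is recoverable here, so a placeholder uuid and a shard id of -1 are used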
+ context.setShardId(new ShardId(new Index(parser.text(), IndexMetadata.INDEX_UUID_NA_VALUE), -1)); + } else if (DocWriteResponse._ID.equals(currentFieldName)) { + context.setId(parser.text()); + } else if (DocWriteResponse._VERSION.equals(currentFieldName)) { + context.setVersion(parser.longValue()); + } else if (DocWriteResponse.RESULT.equals(currentFieldName)) { + String result = parser.text(); + for (DocWriteResponse.Result r : DocWriteResponse.Result.values()) { + if (r.getLowercase().equals(result)) { + context.setResult(r); + break; + } + } + } else if (DocWriteResponse.FORCED_REFRESH.equals(currentFieldName)) { + context.setForcedRefresh(parser.booleanValue()); + } else if (DocWriteResponse._SEQ_NO.equals(currentFieldName)) { + context.setSeqNo(parser.longValue()); + } else if (DocWriteResponse._PRIMARY_TERM.equals(currentFieldName)) { + context.setPrimaryTerm(parser.longValue()); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (DocWriteResponse._SHARDS.equals(currentFieldName)) { + context.setShardInfo(ReplicationResponse.ShardInfo.fromXContent(parser)); + } else { + parser.skipChildren(); // skip potential inner objects for forward compatibility + } + } else if (token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); // skip potential inner arrays for forward compatibility + } + } + public void testBulkItemResponseShouldContainTypeInV7CompatibilityMode() throws IOException { BulkItemResponse bulkItemResponse = BulkItemResponse.success( randomInt(), @@ -192,17 +243,16 @@ public static BulkItemResponse itemResponseFromXContent(XContentParser parser, i if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); builder = indexResponseBuilder; - itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder); - + itemParser = indexParser -> parseInnerToXContent(indexParser, indexResponseBuilder); } else if (opType == DocWriteRequest.OpType.UPDATE) { final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); builder = updateResponseBuilder; - itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder); + itemParser = updateParser -> UpdateResponseTests.parseXContentFields(updateParser, updateResponseBuilder); } else if (opType == DocWriteRequest.OpType.DELETE) { final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); builder = deleteResponseBuilder; - itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder); + itemParser = deleteParser -> parseInnerToXContent(deleteParser, deleteResponseBuilder); } else { throwUnknownField(currentFieldName, parser); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java new file mode 100644 index 0000000000000..2226c40b618f4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -0,0 +1,870 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.coordination.NoMasterBlockService; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.MapperException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpNodeClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class BulkOperationTests extends ESTestCase { + + private final long millis = randomMillisUpToYear9999(); + private final String indexName = "my_index"; + private final String dataStreamName = "my_data_stream"; + private final String fsDataStreamName = "my_failure_store_data_stream"; + + private final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) + 
.settings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .build() + ) + .build(); + private final IndexMetadata ds1BackingIndex1 = DataStreamTestHelper.createBackingIndex(dataStreamName, 1, millis) + .numberOfShards(2) + .build(); + private final IndexMetadata ds1BackingIndex2 = DataStreamTestHelper.createBackingIndex(dataStreamName, 2, millis + 1) + .numberOfShards(2) + .build(); + private final IndexMetadata ds2BackingIndex1 = DataStreamTestHelper.createBackingIndex(fsDataStreamName, 1, millis) + .numberOfShards(2) + .build(); + private final IndexMetadata ds2FailureStore1 = DataStreamTestHelper.createFailureStore(fsDataStreamName, 1, millis) + .numberOfShards(1) + .build(); + + private final DataStream dataStream1 = DataStreamTestHelper.newInstance( + dataStreamName, + List.of(ds1BackingIndex1.getIndex(), ds1BackingIndex2.getIndex()) + ); + private final DataStream dataStream2 = DataStreamTestHelper.newInstance( + fsDataStreamName, + List.of(ds2BackingIndex1.getIndex()), + List.of(ds2FailureStore1.getIndex()) + ); + + private final ClusterState DEFAULT_STATE = ClusterState.builder(ClusterName.DEFAULT) + .metadata( + Metadata.builder() + .indexTemplates( + Map.of( + "ds-template", + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, false)) + .template(new Template(null, null, null, null)) + .build(), + "ds-template-with-failure-store", + ComposableIndexTemplate.builder() + .indexPatterns(List.of(fsDataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) + .template(new Template(null, null, null, null)) + .build() + ) + ) + .indices( + Map.of( + indexName, + indexMetadata, + ds1BackingIndex1.getIndex().getName(), + ds1BackingIndex1, + ds1BackingIndex2.getIndex().getName(), + ds1BackingIndex2, + ds2BackingIndex1.getIndex().getName(), + ds2BackingIndex1, + ds2FailureStore1.getIndex().getName(), + ds2FailureStore1 + ) + ) + .dataStreams(Map.of(dataStreamName, dataStream1, fsDataStreamName, dataStream2), Map.of()) + .build() + ) + .build(); + + private TestThreadPool threadPool; + + @Before + public void setupThreadpool() { + threadPool = new TestThreadPool(getClass().getName()); + } + + @After + public void tearDownThreadpool() { + terminate(threadPool); + } + + /** + * If a bulk operation begins and the cluster is experiencing a non-retryable block, the bulk operation should fail + */ + public void testClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Not retryable + ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(Metadata.CLUSTER_READ_ONLY_BLOCK).build()) + .build(); + + // Make sure we don't wait at all + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false); + doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + 
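+ // the non-retryable block should surface immediately as a ClusterBlockException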
expectThrows(ExecutionException.class, ClusterBlockException.class, future::get); + } + + /** + * If a bulk operation times out while waiting for cluster blocks to be cleared, it should fail the request. + */ + public void testTimeoutOnRetryableClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Retryable + final ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // Always return cluster state, first observation: return same cluster state, second observation: time out, ensure no further wait + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + // Returning same state or timing out will result in one more attempt. + if (randomBoolean()) { + i.getArgument(0, ClusterStateObserver.Listener.class).onNewClusterState(state); + } else { + i.getArgument(0, ClusterStateObserver.Listener.class).onTimeout(null); + } + return null; + }).doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + expectThrows(ExecutionException.class, ClusterBlockException.class, future::get); + verify(observer, times(2)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * If the cluster service closes while a bulk operation is waiting for cluster blocks to be cleared, it should fail the request. 
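+ * The listener should be failed with a NodeClosedException as soon as the cluster service close is signalled.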
+ */ + public void testNodeClosedOnRetryableClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Retryable + final ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // Always return cluster state, first observation: signal cluster service closed, ensure no further wait + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false); + doAnswer((i) -> { + i.getArgument(0, ClusterStateObserver.Listener.class).onClusterServiceClose(); + return null; + }).doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + expectThrows(ExecutionException.class, NodeClosedException.class, future::get); + verify(observer, times(1)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * A bulk operation to an index should succeed if all of its shard level requests succeed + */ + public void testBulkToIndex() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(indexName).id("1").source(Map.of("key", "val"))); + bulkRequest.add(new IndexRequest(indexName).id("3").source(Map.of("key", "val"))); + + NodeClient client = getNodeClient(this::acceptAllShardWrites); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + } + + /** + * A bulk operation to an index should partially succeed if only some of its shard level requests fail + */ + public void testBulkToIndexFailingEntireShard() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(indexName).id("1").source(Map.of("key", "val"))); + bulkRequest.add(new IndexRequest(indexName).id("3").source(Map.of("key", "val"))); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(indexMetadata.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find failed item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("test"))); + } + + /** + * A bulk operation to a data stream should succeed if all of its shard level requests succeed + */ + public void 
testBulkToDataStream() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(dataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(dataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient(this::acceptAllShardWrites); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + } + + /** + * A bulk operation to a data stream should partially succeed if only some of its shard level requests fail + */ + public void testBulkToDataStreamFailingEntireShard() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(dataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(dataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds1BackingIndex2.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find failed item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("test"))); + } + + /** + * A bulk operation to a data stream with a failure store enabled should redirect any shard level failures to the failure store. 
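+ * The overall response should report no failures, with the redirected items appearing under the failure store index.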
+ */ + public void testFailingEntireShardRedirectsToFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(item -> item.getIndex().equals(ds2FailureStore1.getIndex().getName())) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem, is(notNullValue())); + } + + /** + * A bulk operation to a data stream with a failure store enabled should redirect any documents that fail at a shard level to the + * failure store. + */ + public void testFailingDocumentRedirectsToFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + thatFailsDocuments(Map.of(new IndexAndId(ds2BackingIndex1.getIndex().getName(), "3"), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(item -> item.getIndex().equals(ds2FailureStore1.getIndex().getName())) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getIndex(), is(notNullValue())); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the redirected documents experience + * a shard-level failure while writing to the failure store indices. 
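+ * In that case the original failure is reported for the item and the failure store rejection is attached as a suppressed exception.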
+ */ + public void testFailureStoreShardFailureRejectsDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. + NodeClient client = getNodeClient( + failingShards( + Map.of( + new ShardId(ds2BackingIndex1.getIndex(), 0), + () -> new MapperException("root cause"), + new ShardId(ds2FailureStore1.getIndex(), 0), + () -> new MapperException("failure store test failure") + ) + ) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), is(equalTo("failure store test failure"))); + } + + /** + * A document that fails at the shard level will be converted into a failure document if an applicable failure store is present. + * In the unlikely case that the failure document cannot be created, the document will not be redirected to the failure store and + * instead will simply report its original failure in the response, with the conversion failure present as a suppressed exception. 
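+ * Here the failure store document converter is mocked to throw an IOException while transforming the failed request.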
+ */ + public void testFailedDocumentCanNotBeConvertedFails() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + thatFailsDocuments(Map.of(new IndexAndId(ds2BackingIndex1.getIndex().getName(), "3"), () -> new MapperException("root cause"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Mock a failure store document converter that always fails + FailureStoreDocumentConverter mockConverter = mock(FailureStoreDocumentConverter.class); + when(mockConverter.transformFailedRequest(any(), any(), any(), any())).thenThrow(new IOException("Could not serialize json")); + + newBulkOperation(client, bulkRequest, mockConverter, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(IOException.class))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), is(equalTo("Could not serialize json"))); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the cluster is experiencing a + * non-retryable block when the redirected documents would be sent to the shard-level action. + */ + public void testBlockedClusterRejectsFailureStoreDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. 
+ NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a non-retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(IndexMetadata.INDEX_READ_ONLY_BLOCK).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the observer timing out causing the redirects to fail. + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false); + doThrow(new AssertionError("Should not wait on non retryable block")).when(observer).waitForNextChange(any()); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(ClusterBlockException.class))); + assertThat( + failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), + is(equalTo("blocked by: [FORBIDDEN/5/index read-only (api)];")) + ); + + verify(observer, times(0)).isTimedOut(); + verify(observer, times(0)).waitForNextChange(any()); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the cluster times out while waiting for a + * retryable block to clear when the redirected documents would be sent to the shard-level action. + */ + public void testOperationTimeoutRejectsFailureStoreDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. 
+ NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the observer timing out causing the redirects to fail. + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + // Returning same state or timing out will result in one more attempt. + if (randomBoolean()) { + i.getArgument(0, ClusterStateObserver.Listener.class).onNewClusterState(blockedState); + } else { + i.getArgument(0, ClusterStateObserver.Listener.class).onTimeout(null); + } + return null; + }).doThrow(new AssertionError("Should not wait any longer")).when(observer).waitForNextChange(any()); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(ClusterBlockException.class))); + assertThat( + failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), + is(equalTo("blocked by: [SERVICE_UNAVAILABLE/2/no master];")) + ); + + verify(observer, times(2)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * A bulk operation to a data stream with a failure store enabled may completely fail if the cluster service closes out while waiting + * for a retryable block to clear when the redirected documents would be sent to the shard-level action. + */ + public void testNodeClosureRejectsFailureStoreDocument() { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. 
+ NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the node closing causing the redirects to fail. + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + i.getArgument(0, ClusterStateObserver.Listener.class).onClusterServiceClose(); + return null; + }).doThrow(new AssertionError("Should not wait any longer")).when(observer).waitForNextChange(any()); + + CompletableFuture<BulkResponse> future = new CompletableFuture<>(); + ActionListener<BulkResponse> listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + expectThrows(ExecutionException.class, NodeClosedException.class, future::get); + + verify(observer, times(1)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * Accepts all write operations from the given request object when it is encountered in the mock shard bulk action + */ + private BulkShardResponse acceptAllShardWrites(BulkShardRequest request) { + return new BulkShardResponse( + request.shardId(), + Arrays.stream(request.items()).map(item -> requestToResponse(request.shardId(), item)).toArray(BulkItemResponse[]::new) + ); + } + + /** + * Maps an entire shard id to an exception to throw when it is encountered in the mock shard bulk action + */ + private CheckedFunction<BulkShardRequest, BulkShardResponse, Exception> failingShards(Map<ShardId, Supplier<Exception>> shardsToFail) { + return (BulkShardRequest request) -> { + if (shardsToFail.containsKey(request.shardId())) { + throw shardsToFail.get(request.shardId()).get(); + } else { + return acceptAllShardWrites(request); + } + }; + } + + /** + * Index name / id tuple + */ + private record IndexAndId(String indexName, String id) {} + + /** + * Maps a document to an exception to be thrown when it is encountered in the mock shard bulk action + */ + private CheckedFunction<BulkShardRequest, BulkShardResponse, Exception> thatFailsDocuments( + Map<IndexAndId, Supplier<Exception>> documentsToFail + ) { + return (BulkShardRequest request) -> new BulkShardResponse(request.shardId(), Arrays.stream(request.items()).map(item -> { + IndexAndId key = new IndexAndId(request.index(), item.request().id()); + if (documentsToFail.containsKey(key)) { + return requestToFailedResponse(item, documentsToFail.get(key).get()); + } else { + return requestToResponse(request.shardId(), item); + } + }).toArray(BulkItemResponse[]::new)); + } + + /** + * Create a shard-level result given a bulk item + */ + private static BulkItemResponse requestToResponse(ShardId shardId, BulkItemRequest itemRequest) { + return BulkItemResponse.success(itemRequest.id(), itemRequest.request().opType(), switch (itemRequest.request().opType()) { + case INDEX, CREATE -> new IndexResponse(shardId, itemRequest.request().id(), 1, 1, 1, true); + case UPDATE -> new UpdateResponse(shardId, itemRequest.request().id(), 1, 1, 1,
DocWriteResponse.Result.UPDATED); + case DELETE -> new DeleteResponse(shardId, itemRequest.request().id(), 1, 1, 1, true); + }); + } + + /** + * Create a shard-level failure given a bulk item + */ + private static BulkItemResponse requestToFailedResponse(BulkItemRequest itemRequest, Exception reason) { + return BulkItemResponse.failure( + itemRequest.id(), + itemRequest.request().opType(), + new BulkItemResponse.Failure(itemRequest.index(), itemRequest.request().id(), reason) + ); + } + + /** + * Create a client that redirects expected actions to the provided function and fails if an unexpected operation happens. + * @param onShardAction Called when TransportShardBulkAction is executed. + * @return A node client for the test. + */ + private NodeClient getNodeClient(CheckedFunction onShardAction) { + return new NoOpNodeClient(threadPool) { + @Override + @SuppressWarnings("unchecked") + public Task executeLocally( + ActionType action, + Request request, + ActionListener listener + ) { + if (TransportShardBulkAction.TYPE.equals(action)) { + Response response = null; + Exception exception = null; + try { + response = (Response) onShardAction.apply((BulkShardRequest) request); + } catch (Exception responseException) { + exception = responseException; + } + if (response != null) { + listener.onResponse(response); + } else { + listener.onFailure(exception); + } + } else { + fail("Unexpected client call to " + action.name()); + } + return null; + } + }; + } + + private BulkOperation newBulkOperation(NodeClient client, BulkRequest request, ActionListener listener) { + return newBulkOperation( + DEFAULT_STATE, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + mockObserver(DEFAULT_STATE), + listener, + new FailureStoreDocumentConverter() + ); + } + + private BulkOperation newBulkOperation( + NodeClient client, + BulkRequest request, + FailureStoreDocumentConverter failureStoreDocumentConverter, + ActionListener listener + ) { + return newBulkOperation( + DEFAULT_STATE, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + mockObserver(DEFAULT_STATE), + listener, + failureStoreDocumentConverter + ); + } + + private BulkOperation newBulkOperation( + NodeClient client, + BulkRequest request, + ClusterState state, + ClusterStateObserver observer, + ActionListener listener + ) { + return newBulkOperation( + state, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + observer, + listener, + new FailureStoreDocumentConverter() + ); + } + + private BulkOperation newBulkOperation( + ClusterState state, + NodeClient client, + BulkRequest request, + AtomicArray existingResponses, + Map indicesThatCanNotBeCreated, + ClusterStateObserver observer, + ActionListener listener, + FailureStoreDocumentConverter failureStoreDocumentConverter + ) { + // Time provision + long timeZero = TimeUnit.MILLISECONDS.toNanos(randomMillisUpToYear9999() - TimeUnit.DAYS.toMillis(1)); + long duration = TimeUnit.SECONDS.toNanos(randomLongBetween(1, 60)); + long endTime = timeZero + duration; + + // Expressions + ThreadContext ctx = threadPool.getThreadContext(); + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(ctx, new SystemIndices(List.of())); + + // Mocks + final DiscoveryNode mockNode = mock(DiscoveryNode.class); + when(mockNode.getId()).thenReturn(randomAlphaOfLength(10)); + final ClusterService clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(state); + 
when(clusterService.localNode()).thenReturn(mockNode); + + return new BulkOperation( + null, + threadPool, + ThreadPool.Names.SAME, + clusterService, + request, + client, + existingResponses, + indicesThatCanNotBeCreated, + indexNameExpressionResolver, + () -> endTime, + timeZero, + listener, + observer, + failureStoreDocumentConverter + ); + } + + /** + * A default mock cluster state observer that simply returns the state + */ + private ClusterStateObserver mockObserver(ClusterState state) { + ClusterStateObserver mockObserver = mock(ClusterStateObserver.class); + when(mockObserver.setAndGetObservedState()).thenReturn(state); + when(mockObserver.isTimedOut()).thenReturn(false); + return mockObserver; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java similarity index 90% rename from server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java rename to server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java index 962c796e18c2a..67116bd40c2c8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java @@ -22,7 +22,7 @@ import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; -public class FailureStoreDocumentTests extends ESTestCase { +public class FailureStoreDocumentConverterTests extends ESTestCase { public void testFailureStoreDocumentConverstion() throws Exception { IndexRequest source = new IndexRequest("original_index").routing("fake_routing") @@ -36,7 +36,12 @@ public void testFailureStoreDocumentConverstion() throws Exception { String targetIndexName = "rerouted_index"; long testTime = 1702357200000L; // 2023-12-12T05:00:00.000Z - IndexRequest convertedRequest = FailureStoreDocument.transformFailedRequest(source, exception, targetIndexName, () -> testTime); + IndexRequest convertedRequest = new FailureStoreDocumentConverter().transformFailedRequest( + source, + exception, + targetIndexName, + () -> testTime + ); // Retargeting write assertThat(convertedRequest.id(), is(nullValue())); @@ -63,7 +68,7 @@ public void testFailureStoreDocumentConverstion() throws Exception { ); assertThat( ObjectPath.eval("error.stack_trace", convertedRequest.sourceAsMap()), - containsString("at org.elasticsearch.action.bulk.FailureStoreDocumentTests.testFailureStoreDocumentConverstion") + containsString("at org.elasticsearch.action.bulk.FailureStoreDocumentConverterTests.testFailureStoreDocumentConverstion") ); assertThat(convertedRequest.isWriteToFailureStore(), is(true)); diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java new file mode 100644 index 0000000000000..81c8abed352ab --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamMetadata; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DataStreamsActionUtilTests extends ESTestCase { + + public void testDataStreamsResolveConcreteIndexNames() { + + var index1 = new Index("foo1", IndexMetadata.INDEX_UUID_NA_VALUE); + var index3 = new Index("bar", IndexMetadata.INDEX_UUID_NA_VALUE); + + var dataStreamIndex1 = new Index(".ds-foo1", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex2 = new Index(".ds-bar2", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex3 = new Index(".ds-foo2", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex4 = new Index(".ds-baz1", IndexMetadata.INDEX_UUID_NA_VALUE); + + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) + .metadata( + Metadata.builder() + .putCustom( + DataStreamMetadata.TYPE, + new DataStreamMetadata( + ImmutableOpenMap.builder() + .fPut("fooDs", DataStreamTestHelper.newInstance("fooDs", List.of(dataStreamIndex1))) + .fPut("barDs", DataStreamTestHelper.newInstance("barDs", List.of(dataStreamIndex2))) + .fPut("foo2Ds", DataStreamTestHelper.newInstance("foo2Ds", List.of(dataStreamIndex3))) + .fPut("bazDs", DataStreamTestHelper.newInstance("bazDs", List.of(dataStreamIndex4))) + .build(), + ImmutableOpenMap.of() + ) + ) + .indices( + createLocalOnlyIndicesMetadata( + index1, + index3, + dataStreamIndex1, + dataStreamIndex2, + dataStreamIndex3, + dataStreamIndex4 + ) + ) + .build() + ) + .build(); + + var query = new String[] { "foo*", "baz*" }; + var indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); + when(indexNameExpressionResolver.dataStreamNames(any(), any(), eq(query))).thenReturn(List.of("fooDs", "foo2Ds", "bazDs")); + + var resolved = DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, + clusterState, + query, + IndicesOptions.builder().wildcardOptions(IndicesOptions.WildcardOptions.builder().includeHidden(true)).build() + ).toList(); + + assertThat(resolved, containsInAnyOrder(".ds-foo1", ".ds-foo2", ".ds-baz1")); + } + + private Map createLocalOnlyIndicesMetadata(Index... 
indices) { + return Arrays.stream(indices) + .map( + index1 -> Map.entry( + index1.getName(), + IndexMetadata.builder(index1.getName()) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfReplicas(0) + .numberOfShards(1) + .build() + ) + ) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index bc1ec6788eec6..7f50ebca36fc5 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -51,9 +51,7 @@ import static org.elasticsearch.action.datastreams.autosharding.AutoShardingType.INCREASE_SHARDS; import static org.elasticsearch.action.datastreams.autosharding.AutoShardingType.NO_CHANGE_REQUIRED; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThan; public class DataStreamAutoShardingServiceTests extends ESTestCase { @@ -646,10 +644,10 @@ public void testGetMaxIndexLoadWithinCoolingPeriod() { () -> now ); // to cover the entire cooldown period, the last index before the cooling period is taken into account - assertThat(maxIndexLoadWithinCoolingPeriod, is(lastIndexBeforeCoolingPeriodHasLowWriteLoad ? 5.0 : 999.0)); + assertThat(maxIndexLoadWithinCoolingPeriod, is(lastIndexBeforeCoolingPeriodHasLowWriteLoad ? 
15.0 : 999.0)); } - public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { + public void testIndexLoadWithinCoolingPeriodIsSumOfShardsLoads() { final TimeValue coolingPeriod = TimeValue.timeValueDays(3); final Metadata.Builder metadataBuilder = Metadata.builder(); @@ -658,6 +656,8 @@ public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { final String dataStreamName = "logs"; long now = System.currentTimeMillis(); + double expectedIsSumOfShardLoads = 0.5 + 3.0 + 0.3333; + for (int i = 0; i < numberOfBackingIndicesWithinCoolingPeriod; i++) { final long createdAt = now - (coolingPeriod.getMillis() / 2); IndexMetadata indexMetadata; @@ -705,8 +705,7 @@ public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { coolingPeriod, () -> now ); - assertThat(maxIndexLoadWithinCoolingPeriod, is(greaterThan(0.499))); - assertThat(maxIndexLoadWithinCoolingPeriod, is(lessThan(0.5))); + assertThat(maxIndexLoadWithinCoolingPeriod, is(expectedIsSumOfShardLoads)); } public void testAutoShardingResultValidation() { diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java index e7019a583b729..b22a30b533dd2 100644 --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.delete; +import org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -26,6 +27,7 @@ import static org.elasticsearch.action.index.IndexResponseTests.assertDocWriteResponse; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class DeleteResponseTests extends ESTestCase { @@ -102,7 +104,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } DeleteResponse parsedDeleteResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedDeleteResponse = DeleteResponse.fromXContent(parser); + parsedDeleteResponse = parseInstance(parser); assertNull(parser.nextToken()); } @@ -112,6 +114,16 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertDocWriteResponse(expectedDeleteResponse, parsedDeleteResponse); } + private static DeleteResponse parseInstance(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + + DeleteResponse.Builder context = new DeleteResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + BulkItemResponseTests.parseInnerToXContent(parser, context); + } + return context.build(); + } + /** * Returns a tuple of {@link DeleteResponse}s. *

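Note on the DeleteResponse hunk above (and the matching IndexResponse and UpdateResponse hunks later in this patch): with the production fromXContent methods gone, the tests parse responses through the Builder classes plus the shared BulkItemResponseTests.parseInnerToXContent helper. A minimal sketch of that pattern, assuming an ESTestCase subclass and an illustrative json payload (this helper is not part of the patch itself):

    // Sketch only: builder-based parsing as used by the response tests in this patch.
    // Assumes an ESTestCase subclass, a static import of XContentParserUtils.ensureExpectedToken,
    // and an illustrative `json` string.
    private DeleteResponse parseDeleteResponse(String json) throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
            DeleteResponse.Builder builder = new DeleteResponse.Builder();
            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                // Each top-level field is handled by the helper introduced in BulkItemResponseTests.
                BulkItemResponseTests.parseInnerToXContent(parser, builder);
            }
            return builder.build();
        }
    }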
diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index 31fcfe342eb3a..2830d9408e494 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.test.RandomObjects; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -24,6 +25,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Predicate; @@ -34,9 +36,46 @@ public class ExplainResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "explain", + true, + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3]) + ); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse._INDEX); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse._ID); + final ConstructingObjectParser explanationParser = getExplanationsParser(); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, ExplainResponse.EXPLANATION); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> GetResult.fromXContentEmbedded(p), + ExplainResponse.GET + ); + } + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser getExplanationsParser() { + final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>( + "explanation", + true, + arg -> { + if ((float) arg[0] > 0) { + return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); + } else { + return Explanation.noMatch((String) arg[1], (Collection) arg[2]); + } + } + ); + explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), ExplainResponse.VALUE); + explanationParser.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse.DESCRIPTION); + explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, ExplainResponse.DETAILS); + return explanationParser; + } + @Override protected ExplainResponse doParseInstance(XContentParser parser) throws IOException { - return ExplainResponse.fromXContent(parser, randomBoolean()); + return PARSER.apply(parser, randomBoolean()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java index ffdc7b9ca7652..478012567c1ae 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java @@ -14,10 +14,10 @@ import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.plugins.FieldPredicate; import 
java.io.IOException; import java.util.Map; -import java.util.function.Predicate; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -46,7 +46,7 @@ public void testExcludeNestedFields() throws IOException { s -> true, new String[] { "-nested" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -74,7 +74,7 @@ public void testMetadataFilters() throws IOException { s -> true, new String[] { "+metadata" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -87,7 +87,7 @@ public void testMetadataFilters() throws IOException { s -> true, new String[] { "-metadata" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -120,7 +120,7 @@ public void testExcludeMultifields() throws IOException { s -> true, new String[] { "-multifield" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -151,7 +151,7 @@ public void testDontIncludeParentInfo() throws IOException { s -> true, new String[] { "-parent" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -171,7 +171,22 @@ public void testSecurityFilter() throws IOException { } } """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - Predicate securityFilter = f -> f.startsWith("permitted"); + FieldPredicate securityFilter = new FieldPredicate() { + @Override + public boolean test(String field) { + return field.startsWith("permitted"); + } + + @Override + public String modifyHash(String hash) { + return "only-permitted:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; { Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( @@ -223,7 +238,7 @@ public void testFieldTypeFiltering() throws IOException { s -> true, Strings.EMPTY_ARRAY, new String[] { "text", "keyword" }, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java index ea9e83021e781..878c35b449366 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.index; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -29,6 +30,7 @@ import static org.elasticsearch.action.support.replication.ReplicationResponseTests.assertShardInfo; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class IndexResponseTests extends ESTestCase { @@ -111,7 +113,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } IndexResponse parsedIndexResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedIndexResponse = IndexResponse.fromXContent(parser); + parsedIndexResponse = parseInstanceFromXContent(parser); assertNull(parser.nextToken()); } @@ -121,6 +123,15 @@ private void 
doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertDocWriteResponse(expectedIndexResponse, parsedIndexResponse); } + private static IndexResponse parseInstanceFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + IndexResponse.Builder context = new IndexResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + BulkItemResponseTests.parseInnerToXContent(parser, context); + } + return context.build(); + } + public static void assertDocWriteResponse(DocWriteResponse expected, DocWriteResponse actual) { assertEquals(expected.getIndex(), actual.getIndex()); assertEquals(expected.getId(), actual.getId()); diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 7166664181eb1..70c4d73f578b3 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -30,21 +30,24 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.CoordinatorRewriteContextProvider; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardLongFieldRange; import org.elasticsearch.search.CanMatchShardResponse; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.sort.MinAndMax; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -497,14 +500,14 @@ public void testCanMatchFilteringOnCoordinatorThatCanBeSkipped() throws Exceptio regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, (updatedSearchShardIterators, requests) -> { List skippedShards = updatedSearchShardIterators.stream().filter(SearchShardIterator::skip).toList(); - ; List nonSkippedShards = updatedSearchShardIterators.stream() .filter(searchShardIterator -> searchShardIterator.skip() == false) .toList(); - ; int regularIndexShardCount = (int) updatedSearchShardIterators.stream() .filter(s -> regularIndices.contains(s.shardId().getIndex())) @@ -568,6 +571,8 @@ public void testCanMatchFilteringOnCoordinatorParsingFails() throws Exception { regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, this::assertAllShardsAreQueried ); } @@ -624,6 +629,99 @@ public void testCanMatchFilteringOnCoordinatorThatCanNotBeSkipped() throws Excep 
regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, + this::assertAllShardsAreQueried + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withDefaultBackgroundFilter() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2100).to(2200)); + AggregationBuilder aggregation = new SignificantTermsAggregationBuilder("significant_terms"); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + null, + // The default background filter matches the whole index, so all shards must be queried. + this::assertAllShardsAreQueried + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withBackgroundFilter() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + Index index4 = new Index("index4", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + contextProviderBuilder.addIndexMinMaxTimestamps(index4, DataStream.TIMESTAMP_FIELD_NAME, 3000, 3999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(3100).to(3200)); + AggregationBuilder aggregation = new SignificantTermsAggregationBuilder("significant_terms").backgroundFilter( + new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(0).to(1999) + ); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + null, + (updatedSearchShardIterators, requests) -> { + // The search query matches index4, the background query matches index1 and index2, + // so index3 is the only one that must be skipped. 
+ for (SearchShardIterator shard : updatedSearchShardIterators) { + if (shard.shardId().getIndex().getName().equals("index3")) { + assertTrue(shard.skip()); + } else { + assertFalse(shard.skip()); + } + } + } + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withSuggest() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2100).to(2200)); + AggregationBuilder aggregation = new SignificantTermsAggregationBuilder("significant_terms").backgroundFilter( + new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2000).to(2300) + ); + SuggestBuilder suggest = new SuggestBuilder().setGlobalText("test"); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + suggest, + // The query and aggregation match only index3, but suggest should match everything. this::assertAllShardsAreQueried ); } @@ -669,6 +767,8 @@ public void testCanMatchFilteringOnCoordinatorThatCanBeSkippedTsdb() throws Exce List.of(), contextProviderBuilder.build(), queryBuilder, + List.of(), + null, (updatedSearchShardIterators, requests) -> { var skippedShards = updatedSearchShardIterators.stream().filter(SearchShardIterator::skip).toList(); var nonSkippedShards = updatedSearchShardIterators.stream() @@ -713,11 +813,13 @@ private void assertAllShardsAreQueried(List<SearchShardIterator> updatedSearchSh assertThat(requests.size(), equalTo(shardsWithPrimariesAssigned)); } - private <QB extends AbstractQueryBuilder<QB>> void assignShardsAndExecuteCanMatchPhase( + private void assignShardsAndExecuteCanMatchPhase( List<DataStream> dataStreams, List<Index> regularIndices, CoordinatorRewriteContextProvider contextProvider, - AbstractQueryBuilder<QB> query, + QueryBuilder query, + List<AggregationBuilder> aggregations, + SuggestBuilder suggest, BiConsumer<List<SearchShardIterator>, List<ShardSearchRequest>> canMatchResultsConsumer ) throws Exception { Map lookup = new ConcurrentHashMap<>(); @@ -764,14 +866,20 @@ private <QB extends AbstractQueryBuilder<QB>> void assignShardsAndExecuteCanMatc searchRequest.allowPartialSearchResults(true); final AliasFilter aliasFilter; - if (randomBoolean()) { + if (aggregations.isEmpty() == false || randomBoolean()) { // Apply the query on the request body SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); searchSourceBuilder.query(query); + for (AggregationBuilder aggregation : aggregations) { + searchSourceBuilder.aggregation(aggregation); + } + if (suggest != null) { + searchSourceBuilder.suggest(suggest); + } searchRequest.source(searchSourceBuilder); // Sometimes apply the same query in the alias filter too - aliasFilter = AliasFilter.of(randomBoolean() ? query : null, Strings.EMPTY_ARRAY); + aliasFilter = AliasFilter.of(aggregations.isEmpty() && randomBoolean() ?
query : null, Strings.EMPTY_ARRAY); } else { // Apply the query as an alias filter aliasFilter = AliasFilter.of(query, Strings.EMPTY_ARRAY); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java index 05c974ea9d4d3..0eefeb87d3e02 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.update; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.index.IndexResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; @@ -38,6 +39,7 @@ import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -174,7 +176,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } UpdateResponse parsedUpdateResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedUpdateResponse = UpdateResponse.fromXContent(parser); + parsedUpdateResponse = parseInstanceFromXContent(parser); assertNull(parser.nextToken()); } @@ -191,6 +193,32 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertToXContentEquivalent(expectedBytes, parsedBytes, xContentType); } + private static UpdateResponse parseInstanceFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + + UpdateResponse.Builder context = new UpdateResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + parseXContentFields(parser, context); + } + return context.build(); + } + + /** + * Parse the current token and update the parsing context appropriately. + */ + public static void parseXContentFields(XContentParser parser, UpdateResponse.Builder context) throws IOException { + XContentParser.Token token = parser.currentToken(); + String currentFieldName = parser.currentName(); + + if (UpdateResponse.GET.equals(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + context.setGetResult(GetResult.fromXContentEmbedded(parser)); + } + } else { + BulkItemResponseTests.parseInnerToXContent(parser, context); + } + } + /** * Returns a tuple of {@link UpdateResponse}s. *

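Aside on the CanMatchPreFilterSearchPhaseTests additions earlier in this patch: a significant_terms background filter widens what the coordinator must consider before skipping a shard. The standalone sketch below (illustrative types and names, not code from this patch) spells out the overlap rule those tests assert on:

    // A shard may be skipped only when its @timestamp range overlaps neither the top-level
    // query range nor the significant_terms background filter range.
    record TimestampRange(long min, long max) {
        boolean overlaps(TimestampRange other) {
            return min <= other.max && other.min <= max;
        }
    }

    static boolean canSkipShard(TimestampRange index, TimestampRange query, TimestampRange background) {
        return index.overlaps(query) == false && index.overlaps(background) == false;
    }

    // From the background-filter test above: index3 [2000, 2999] overlaps neither the query range
    // [3100, 3200] nor the background filter [0, 1999], so it is the only index that can be skipped;
    // index1 and index2 overlap the background filter and must still be queried.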
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java index 92f2f5c41d1a6..b453abd97ec84 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java @@ -45,6 +45,13 @@ import static org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService.HEARTBEAT_FREQUENCY; import static org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService.MAX_MISSED_HEARTBEATS; +/** + * Tests that the Coordinator code runs correctly relying on atomic register compare-and-swap. Stateless will use implementations of atomic + * register CAS in the cloud blob stores. + * + * StatelessCoordinationTests extends AtomicRegisterCoordinatorTests for testing, inheriting all the tests but using different + * {@link ElectionStrategy} implementations, etc. + */ @TestLogging(reason = "these tests do a lot of log-worthy things but we usually don't care", value = "org.elasticsearch:FATAL") public class AtomicRegisterCoordinatorTests extends CoordinatorTests { @@ -86,23 +93,23 @@ public void testAckListenerReceivesNacksIfLeaderStandsDown() { } @Override - @AwaitsFix(bugUrl = "ES-5645") public void testAckListenerReceivesNacksIfPublicationTimesOut() { // The leader still has access to the register, therefore it acknowledges the state update + testAckListenerReceivesNacksIfPublicationTimesOut(true); } @Override - @AwaitsFix(bugUrl = "ES-5645") - public void testClusterCannotFormWithFailingJoinValidation() { - // A single node can form a cluster in this case + public void testClusterCannotFormWithFailingJoinValidation() throws Exception { + // A single node can form a cluster if it is able to join (vote for) its own cluster, so we must disable all nodes from successfully + // joining a cluster. + clusterCannotFormWithFailingJoinValidation(true); } @Override - @AwaitsFix(bugUrl = "ES-5645") + @AwaitsFix(bugUrl = "ES-8099") public void testCannotJoinClusterWithDifferentUUID() { - // The cluster2 leader is considered dead since we only run the nodes in cluster 1 - // therefore the node coming from cluster 2 ends up taking over the old master in cluster 2 - // TODO: add more checks to avoid forming a mixed cluster between register based and traditional clusters + // Placeholder to implement a test wherein the blob store cluster state is suddenly swapped out with a different cluster's state + // with a different UUID. The cluster nodes should recognize the UUID change and refuse to load the foreign cluster state. } @Override @@ -192,6 +199,9 @@ protected CoordinatorStrategy createCoordinatorStrategy() { return new AtomicRegisterCoordinatorStrategy(); } + /** + * Strategy used to inject custom behavior into the {@link AbstractCoordinatorTestCase} test infrastructure. 
+ */ + class AtomicRegisterCoordinatorStrategy implements CoordinatorStrategy { + private final AtomicLong currentTermRef = new AtomicLong(); + private final AtomicReference heartBeatRef = new AtomicReference<>(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 2985cd33aaa64..f816b6ff6571c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -93,7 +93,6 @@ @TestLogging(reason = "these tests do a lot of log-worthy things but we usually don't care", value = "org.elasticsearch:FATAL") public class CoordinatorTests extends AbstractCoordinatorTestCase { - public void testCanUpdateClusterStateAfterStabilisation() { try (Cluster cluster = new Cluster(randomIntBetween(1, 5))) { cluster.runRandomly(); @@ -637,6 +636,10 @@ public void testAckListenerReceivesNoAckFromHangingFollower() { } public void testAckListenerReceivesNacksIfPublicationTimesOut() { + testAckListenerReceivesNacksIfPublicationTimesOut(false); + } + + protected void testAckListenerReceivesNacksIfPublicationTimesOut(boolean expectLeaderAcksSuccessfullyInStateless) { try (Cluster cluster = new Cluster(3)) { cluster.runRandomly(); cluster.stabilise(); @@ -651,12 +654,19 @@ public void testAckListenerReceivesNacksIfPublicationTimesOut() { assertFalse("expected no immediate ack from " + leader, ackCollector.hasAcked(leader)); assertFalse("expected no immediate ack from " + follower0, ackCollector.hasAcked(follower0)); assertFalse("expected no immediate ack from " + follower1, ackCollector.hasAcked(follower1)); + follower0.heal(); follower1.heal(); cluster.stabilise(); assertTrue("expected eventual nack from " + follower0, ackCollector.hasAckedUnsuccessfully(follower0)); assertTrue("expected eventual nack from " + follower1, ackCollector.hasAckedUnsuccessfully(follower1)); - assertTrue("expected eventual nack from " + leader, ackCollector.hasAckedUnsuccessfully(leader)); + if (expectLeaderAcksSuccessfullyInStateless) { + // A stateless leader directly updates the cluster state in the remote blob store: it does not require communication with + // the other cluster nodes to proceed with an update commit to the cluster state. + assertTrue("expected ack from leader, " + leader, ackCollector.hasAckedSuccessfully(leader)); + } else { + assertTrue("expected eventual nack from leader, " + leader, ackCollector.hasAckedUnsuccessfully(leader)); + } } } @@ -1271,21 +1281,50 @@ public void testNodeCannotJoinIfJoinValidationFailsOnJoiningNode() { } } - public void testClusterCannotFormWithFailingJoinValidation() { + public void testClusterCannotFormWithFailingJoinValidation() throws Exception { + clusterCannotFormWithFailingJoinValidation(false); + } + + /** + * Forms a random-sized cluster and then disables join validation on either a random majority subset or all cluster nodes. Then checks + * that election fails. + * + * @param failJoinOnAllNodes this controls whether to fail join on all nodes or only a majority subset. The atomic register CAS election + * strategy will succeed in electing a master if any node can vote (even the master candidate voting for + * itself).
+ * @throws Exception + */ + protected void clusterCannotFormWithFailingJoinValidation(boolean failJoinOnAllNodes) throws Exception { try (Cluster cluster = new Cluster(randomIntBetween(1, 5))) { - // fail join validation on a majority of nodes in the initial configuration - randomValueOtherThanMany( - nodes -> cluster.initialConfiguration.hasQuorum( - nodes.stream().map(ClusterNode::getLocalNode).map(DiscoveryNode::getId).collect(Collectors.toSet()) - ) == false, - () -> randomSubsetOf(cluster.clusterNodes) - ).forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> { + List clusterNodesToFailJoin; + if (failJoinOnAllNodes) { + // The AtomicRegister strategy succeeds if a master candidate votes for itself, so we must disable all nodes from passing + // join validation so that none of them can self-elect. + clusterNodesToFailJoin = cluster.clusterNodes; + } else { + // Fetch a random subset of cluster nodes that form a quorum (majority subset). + clusterNodesToFailJoin = randomValueOtherThanMany( + nodes -> cluster.initialConfiguration.hasQuorum( + nodes.stream().map(ClusterNode::getLocalNode).map(DiscoveryNode::getId).collect(Collectors.toSet()) + ) == false, + () -> randomSubsetOf(cluster.clusterNodes) + ); + } + + // Fail join validation on the set of nodes so that election will fail in the initial configuration. + clusterNodesToFailJoin.forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> { throw new IllegalArgumentException("join validation failed"); })); + cluster.bootstrapIfNecessary(); + + // Run the cluster for 10 seconds to give the cluster time to elect a master. + // It's possible stabilisation takes longer, but essentially impossible that it _always_ takes longer. cluster.runFor(10000, "failing join validation"); + assertTrue(cluster.clusterNodes.stream().allMatch(cn -> cn.getLastAppliedClusterState().version() == 0)); + // Now clear the validation failures and verify that the cluster stabilizes. 
for (ClusterNode clusterNode : cluster.clusterNodes) { clusterNode.extraJoinValidators.clear(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java index 9ee942df1c2b0..48d28462231a0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java @@ -12,8 +12,12 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTableGenerator; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -21,11 +25,16 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.function.Predicate; import java.util.regex.Pattern; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.CoreMatchers.equalTo; public class ClusterIndexHealthTests extends AbstractXContentSerializingTestCase { @@ -101,7 +110,13 @@ protected Writeable.Reader instanceReader() { @Override protected ClusterIndexHealth doParseInstance(XContentParser parser) throws IOException { - return ClusterIndexHealth.fromXContent(parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String index = parser.currentName(); + ClusterIndexHealth parsed = parseInstance(parser, index); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + return parsed; } @Override @@ -281,4 +296,66 @@ protected ClusterIndexHealth mutateInstance(ClusterIndexHealth instance) { throw new UnsupportedOperationException(); } } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_index_health", + true, + (parsedObjects, index) -> { + int i = 0; + int numberOfShards = (int) parsedObjects[i++]; + int numberOfReplicas = (int) parsedObjects[i++]; + int activeShards = (int) parsedObjects[i++]; + int relocatingShards = (int) parsedObjects[i++]; + int initializingShards = (int) parsedObjects[i++]; + int unassignedShards = (int) parsedObjects[i++]; + int activePrimaryShards = (int) parsedObjects[i++]; + String statusStr = (String) parsedObjects[i++]; + ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); + @SuppressWarnings("unchecked") + List shardList = (List) parsedObjects[i]; + final Map shards; + if (shardList == null || shardList.isEmpty()) { + shards = emptyMap(); + } else { + shards = Maps.newMapWithExpectedSize(shardList.size()); + for (ClusterShardHealth shardHealth : shardList) { + shards.put(shardHealth.getShardId(), 
shardHealth); + } + } + return new ClusterIndexHealth( + index, + numberOfShards, + numberOfReplicas, + activeShards, + relocatingShards, + initializingShards, + unassignedShards, + activePrimaryShards, + status, + shards + ); + } + ); + + public static final ObjectParser.NamedObjectParser SHARD_PARSER = ( + XContentParser p, + String indexIgnored, + String shardId) -> ClusterShardHealthTests.PARSER.apply(p, Integer.valueOf(shardId)); + + static { + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.NUMBER_OF_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.NUMBER_OF_REPLICAS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.ACTIVE_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.RELOCATING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.INITIALIZING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.UNASSIGNED_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.ACTIVE_PRIMARY_SHARDS)); + PARSER.declareString(constructorArg(), new ParseField(ClusterIndexHealth.STATUS)); + // Can be absent if LEVEL == 'indices' or 'cluster' + PARSER.declareNamedObjects(optionalConstructorArg(), SHARD_PARSER, new ParseField(ClusterIndexHealth.SHARDS)); + } + + public static ClusterIndexHealth parseInstance(XContentParser parser, String index) { + return PARSER.apply(parser, index); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java index ce7c366ff30e6..1e1eacba183d2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java @@ -9,17 +9,61 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; import java.util.function.Predicate; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ClusterShardHealthTests extends AbstractXContentSerializingTestCase { + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_shard_health", + true, + (parsedObjects, shardId) -> { + int i = 0; + boolean primaryActive = (boolean) parsedObjects[i++]; + int activeShards = (int) parsedObjects[i++]; + int relocatingShards = (int) parsedObjects[i++]; + int initializingShards = (int) parsedObjects[i++]; + int unassignedShards = (int) parsedObjects[i++]; + String statusStr = (String) parsedObjects[i]; + ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); + return new ClusterShardHealth( + shardId, + status, + activeShards, + relocatingShards, + initializingShards, + unassignedShards, + primaryActive + ); + } + ); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField(ClusterShardHealth.PRIMARY_ACTIVE)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.ACTIVE_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.RELOCATING_SHARDS)); + 
PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.INITIALIZING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.UNASSIGNED_SHARDS)); + PARSER.declareString(constructorArg(), new ParseField(ClusterShardHealth.STATUS)); + } + @Override protected ClusterShardHealth doParseInstance(XContentParser parser) throws IOException { - return ClusterShardHealth.fromXContent(parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String shardIdStr = parser.currentName(); + ClusterShardHealth parsed = PARSER.apply(parser, Integer.valueOf(shardIdStr)); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + return parsed; } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java index 8b9ef91923839..067a67ee025a1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java @@ -32,6 +32,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class ComponentTemplateTests extends SimpleDiffableSerializationTestCase { @Override @@ -112,7 +113,7 @@ public static Map randomAliases() { return Collections.singletonMap(aliasName, aliasMeta); } - private static CompressedXContent randomMappings() { + public static CompressedXContent randomMappings() { try { return new CompressedXContent("{\"properties\":{\"" + randomAlphaOfLength(5) + "\":{\"type\":\"keyword\"}}}"); } catch (IOException e) { @@ -121,7 +122,7 @@ private static CompressedXContent randomMappings() { } } - private static Settings randomSettings() { + public static Settings randomSettings() { return indexSettings(randomIntBetween(1, 10), randomIntBetween(0, 5)).put(IndexMetadata.SETTING_BLOCKS_READ, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) @@ -265,7 +266,7 @@ public void testMappingsEquals() throws IOException { } } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { Settings settings = null; CompressedXContent mappings = null; Map aliases = null; @@ -278,7 +279,7 @@ public void testXContentSerializationWithRollover() throws IOException { if (randomBoolean()) { aliases = randomAliases(); } - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); ComponentTemplate template = new ComponentTemplate( new Template(settings, mappings, aliases, lifecycle), randomNonNegativeLong(), @@ -288,14 +289,19 @@ public void testXContentSerializationWithRollover() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration); + DataStreamGlobalRetention globalRetention = 
DataStreamGlobalRetentionTests.randomGlobalRetention(); + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); - for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention()) + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) .getConditions() .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index fe678ec23afad..15b55b5f002bb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -30,6 +30,7 @@ import static org.elasticsearch.cluster.metadata.DataStream.TIMESTAMP_FIELD_NAME; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class ComposableIndexTemplateTests extends SimpleDiffableSerializationTestCase { @Override @@ -109,10 +110,6 @@ private static Map randomAliases() { return Collections.singletonMap(aliasName, aliasMeta); } - private static DataStreamLifecycle randomLifecycle() { - return DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); - } - private static CompressedXContent randomMappings(ComposableIndexTemplate.DataStreamTemplate dataStreamTemplate) { try { if (dataStreamTemplate != null) { @@ -212,7 +209,7 @@ public void testComponentTemplatesEquals() { assertThat(ComposableIndexTemplate.componentTemplatesEquals(List.of(), List.of(randomAlphaOfLength(5))), equalTo(false)); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { Settings settings = null; CompressedXContent mappings = null; Map aliases = null; @@ -226,7 +223,8 @@ public void testXContentSerializationWithRollover() throws IOException { if (randomBoolean()) { aliases = randomAliases(); } - DataStreamLifecycle lifecycle = randomLifecycle(); + // We use the empty lifecycle so the global retention can be in effect + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); Template template = new Template(settings, mappings, aliases, lifecycle); ComposableIndexTemplate.builder() .indexPatterns(List.of(randomAlphaOfLength(4))) @@ -240,14 +238,19 @@ public void testXContentSerializationWithRollover() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); + 
ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); - for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention()) + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) .getConditions() .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java similarity index 90% rename from server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java rename to server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java index 491ba868dfd9b..e65b4d41bbe02 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java @@ -18,7 +18,7 @@ import java.util.List; -public class DataStreamGlobalRetentionSerializationTests extends SimpleDiffableWireSerializationTestCase { +public class DataStreamGlobalRetentionTests extends SimpleDiffableWireSerializationTestCase { @Override protected ClusterState.Custom makeTestChanges(ClusterState.Custom testInstance) { @@ -75,9 +75,10 @@ protected ClusterState.Custom mutateInstance(ClusterState.Custom instance) { } public static DataStreamGlobalRetention randomGlobalRetention() { + boolean withDefault = randomBoolean(); return new DataStreamGlobalRetention( - randomBoolean() ? null : TimeValue.timeValueDays(randomIntBetween(1, 1000)), - randomBoolean() ? null : TimeValue.timeValueDays(randomIntBetween(1000, 2000)) + withDefault == false ? null : TimeValue.timeValueDays(randomIntBetween(1, 1000)), + withDefault && randomBoolean() ? 
null : TimeValue.timeValueDays(randomIntBetween(1000, 2000)) ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java index e3bf5260a7445..38b09f3690870 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java @@ -39,6 +39,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.RetentionSource.MAX_GLOBAL_RETENTION; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class DataStreamLifecycleTests extends AbstractXContentSerializingTestCase { @@ -106,13 +107,14 @@ protected DataStreamLifecycle doParseInstance(XContentParser parser) throws IOEx return DataStreamLifecycle.fromXContent(parser); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { DataStreamLifecycle lifecycle = createTestInstance(); try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); - lifecycle.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration, globalRetention); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + lifecycle.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) @@ -124,6 +126,17 @@ public void testXContentSerializationWithRollover() throws IOException { if (rolloverConfiguration.getAutomaticConditions().isEmpty() == false) { assertThat(serialized, containsString("[automatic]")); } + // We check that even if there was no retention provided by the user, the global retention applies + if (lifecycle.getDataRetention() == null) { + assertThat(serialized, not(containsString("data_retention"))); + } else { + assertThat(serialized, containsString("data_retention")); + } + if (lifecycle.isEnabled()) { + assertThat(serialized, containsString("effective_retention")); + } else { + assertThat(serialized, not(containsString("effective_retention"))); + } } } @@ -317,7 +330,12 @@ public void testEffectiveRetention() { TimeValue maxRetentionLessThanDataStream = TimeValue.timeValueDays(dataStreamRetention.days() - 1); effectiveDataRetentionWithSource = lifecycleRetention.getEffectiveDataRetentionWithSource( - new DataStreamGlobalRetention(randomBoolean() ? null : TimeValue.timeValueDays(10), maxRetentionLessThanDataStream) + new DataStreamGlobalRetention( + randomBoolean() + ? 
null + : TimeValue.timeValueDays(randomIntBetween(1, (int) (maxRetentionLessThanDataStream.days() - 1))), + maxRetentionLessThanDataStream + ) ); assertThat(effectiveDataRetentionWithSource.v1(), equalTo(maxRetentionLessThanDataStream)); assertThat(effectiveDataRetentionWithSource.v2(), equalTo(MAX_GLOBAL_RETENTION)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 3e758df17c432..9db7d1047e249 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -1660,7 +1660,7 @@ private DataStream createDataStream( return newInstance(dataStreamName, backingIndices, backingIndicesCount, null, false, lifecycle); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); List indices = randomIndexInstances(); long generation = indices.size() + ESTestCase.randomLongBetween(1, 128); @@ -1675,7 +1675,7 @@ public void testXContentSerializationWithRollover() throws IOException { failureIndices = randomNonEmptyIndexInstances(); } - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); DataStream dataStream = new DataStream( dataStreamName, indices, @@ -1697,8 +1697,10 @@ public void testXContentSerializationWithRollover() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); - dataStream.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration, globalRetention); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); + + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + dataStream.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) @@ -1706,6 +1708,9 @@ public void testXContentSerializationWithRollover() throws IOException { .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 1e35a40dedc17..955d7d2de6882 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; @@ -786,7 +787,7 @@ public void testFindMappingsWithFilters() throws IOException { if (index.equals("index2")) { return Predicates.never(); } - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; }, Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP); assertIndexMappingsNoFields(mappings, "index2"); diff --git a/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java b/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java index 351efa59f2381..67f74df78e256 100644 --- a/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java +++ b/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java @@ -9,12 +9,14 @@ package org.elasticsearch.common.collect; import org.elasticsearch.common.Randomness; +import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; @@ -242,6 +244,29 @@ public Integer next() { assertEquals(array.length, index.get()); } + public void testEnumerate() { + assertEmptyIterator(Iterators.enumerate(Iterators.concat(), Tuple::new)); + + final var array = randomIntegerArray(); + final var index = new AtomicInteger(); + Iterators.enumerate(Iterators.forArray(array), Tuple::new).forEachRemaining(t -> { + int idx = index.getAndIncrement(); + assertEquals(idx, t.v1().intValue()); + assertEquals(array[idx], t.v2()); + }); + assertEquals(array.length, index.get()); + } + + public void testSupplier() { + assertEmptyIterator(Iterators.fromSupplier(() -> null)); + + final var array = randomIntegerArray(); + final var index = new AtomicInteger(); + final var queue = new LinkedList<>(Arrays.asList(array)); + Iterators.fromSupplier(queue::pollFirst).forEachRemaining(i -> assertEquals(array[index.getAndIncrement()], i)); + assertEquals(array.length, index.get()); + } + public void testEquals() { final BiPredicate notCalled = (a, b) -> { throw new AssertionError("not called"); }; diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 13f789a8b5fae..b198bf6dbed86 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -40,6 +40,7 @@ import java.util.stream.Stream; import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -914,19 +915,32 @@ public void testAffixKeySetting() { assertFalse(listAffixSetting.match("foo")); } - public void testAffixKeyExists() { - Setting setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope)); + public void testAffixKeySettingWithSecure() { + Setting.AffixSetting secureSetting = Setting.affixKeySetting( + "foo.", + "secret", + (key) -> 
SecureSetting.secureString(key, null) + ); - assertFalse(setting.exists(Settings.EMPTY)); - assertTrue(setting.exists(Settings.builder().put("foo.test.enable", "true").build())); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("foo.a.secret", "secret1"); + secureSettings.setString("foo.b.secret", "secret2"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + + assertThat(secureSetting.exists(settings), is(true)); + + Map secrets = secureSetting.getAsMap(settings); + assertThat(secrets.keySet(), contains("a", "b")); + + Setting secureA = secureSetting.getConcreteSetting("foo.a.secret"); + assertThat(secureA.get(settings), is("secret1")); + assertThat(secrets.get("a"), is("secret1")); } - public void testAffixKeyExistsWithSecure() { + public void testAffixKeyExists() { Setting setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope)); - - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("foo.test.enabled", "true"); - assertFalse(setting.exists(Settings.builder().setSecureSettings(secureSettings).build())); + assertFalse(setting.exists(Settings.EMPTY)); + assertTrue(setting.exists(Settings.builder().put("foo.test.enable", "true").build())); } public void testAffixSettingNamespaces() { diff --git a/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java b/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java new file mode 100644 index 0000000000000..198adda7c33be --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.util; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class ObjectObjectPagedHashMapTests extends ESTestCase { + + private BigArrays mockBigArrays(CircuitBreakerService service) { + return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), service, true); + } + + public void testDuel() { + // first with cranky + try { + doTestDuel(mockBigArrays(new CrankyCircuitBreakerService())); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo("cranky breaker")); + } + // then to the end + doTestDuel(mockBigArrays(new NoneCircuitBreakerService())); + } + + private void doTestDuel(BigArrays bigArrays) { + final Map map1 = new HashMap<>(); + try ( + ObjectObjectPagedHashMap map2 = new ObjectObjectPagedHashMap<>( + randomInt(42), + 0.6f + randomFloat() * 0.39f, + bigArrays + ) + ) { + final int maxKey = randomIntBetween(1, 10000); + BytesRef[] bytesRefs = new BytesRef[maxKey]; + for (int i = 0; i < maxKey; i++) { + bytesRefs[i] = randomBytesRef(); + } + final int iters = scaledRandomIntBetween(10000, 100000); + for (int i = 0; i < iters; ++i) { + final boolean put = randomBoolean(); + final int iters2 = randomIntBetween(1, 100); + for (int j = 0; j < iters2; ++j) { + final BytesRef key = bytesRefs[random().nextInt(maxKey)]; + if (put) { + final Object value = new Object(); + assertSame(map1.put(key, value), map2.put(key, value)); + } else { + assertSame(map1.remove(key), map2.remove(key)); + } + assertEquals(map1.size(), map2.size()); + } + } + for (int i = 0; i < maxKey; i++) { + assertSame(map1.get(bytesRefs[i]), map2.get(bytesRefs[i])); + } + final Map copy = new HashMap<>(); + for (ObjectObjectPagedHashMap.Cursor cursor : map2) { + copy.put(cursor.key, cursor.value); + } + assertEquals(map1, copy); + } + } + + private BytesRef randomBytesRef() { + byte[] bytes = new byte[randomIntBetween(2, 20)]; + random().nextBytes(bytes); + return new BytesRef(bytes); + } + + public void testAllocation() { + MockBigArrays.assertFitsIn(ByteSizeValue.ofBytes(256), bigArrays -> new ObjectObjectPagedHashMap<>(1, bigArrays)); + } + +} diff --git a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java index a4436fd637c5a..d4c21ed9839fc 100644 --- a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java @@ -152,9 +152,9 @@ public void testUpdateHealthInfo() throws Exception { // We override the poll interval like this to avoid the min value set by the setting which is too high for this test localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); - 
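The new ObjectObjectPagedHashMapTests above is a classic "duel" test: the structure under test and a trusted java.util.HashMap receive the same random put/remove sequence and must never disagree on return values, sizes, or final contents. A self-contained sketch of that pattern in plain Java, with TreeMap standing in for the structure being duelled (all names here are illustrative, not the test's API):

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.TreeMap;

final class DuelTestSketch {

    public static void main(String[] args) {
        Random random = new Random(42);
        Map<Integer, String> reference = new HashMap<>(); // trusted reference implementation
        Map<Integer, String> underTest = new TreeMap<>(); // stand-in for the structure under test

        for (int i = 0; i < 100_000; i++) {
            int key = random.nextInt(1_000);
            if (random.nextBoolean()) {
                String value = "v" + i;
                // both maps must report the same previous mapping for the key
                checkEqual(reference.put(key, value), underTest.put(key, value), i);
            } else {
                checkEqual(reference.remove(key), underTest.remove(key), i);
            }
            checkEqual(reference.size(), underTest.size(), i);
        }
        checkEqual(reference, underTest, -1);
    }

    private static void checkEqual(Object expected, Object actual, int iteration) {
        if (!Objects.equals(expected, actual)) {
            throw new AssertionError("diverged at iteration " + iteration + ": " + expected + " vs " + actual);
        }
    }
}

The test in the diff layers two extra concerns on top of this skeleton: a first pass against a CrankyCircuitBreakerService that is allowed to abort with a CircuitBreakingException, and a final walk over the paged map's cursors to compare full contents against the reference HashMap.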
assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); } @SuppressWarnings("unchecked") @@ -169,7 +169,7 @@ public void testDoNotUpdateHealthInfoOnFailure() throws Exception { localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); assertBusy(() -> assertThat(clientCalled.get(), equalTo(true))); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); } @SuppressWarnings("unchecked") @@ -189,9 +189,10 @@ public void testSendHealthInfoToNewNode() throws Exception { return null; }).when(client).execute(any(), any(), any()); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); when(clusterService.state()).thenReturn(previous); localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -213,9 +214,10 @@ public void testResendHealthInfoOnMasterChange() throws Exception { return null; }).when(client).execute(any(), any(), any()); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); when(clusterService.state()).thenReturn(previous); localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -233,25 +235,24 @@ public void testEnablingAndDisabling() throws Exception { // Ensure that there are no issues if the cluster state hasn't been initialized yet localHealthMonitor.setEnabled(true); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); assertThat(clientCalledCount.get(), equalTo(0)); when(clusterService.state()).thenReturn(clusterState); localHealthMonitor.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); - assertThat(clientCalledCount.get(), equalTo(1)); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); + assertBusy(() -> assertThat(clientCalledCount.get(), equalTo(1))); DiskHealthInfo nextHealthStatus = new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); // Disable the local monitoring localHealthMonitor.setEnabled(false); - localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(1)); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); mockHealthTracker.setHealthInfo(nextHealthStatus); assertThat(clientCalledCount.get(), equalTo(1)); - localHealthMonitor.setMonitorInterval(TimeValue.timeValueSeconds(30)); localHealthMonitor.setEnabled(true); - assertBusy(() -> 
assertThat(mockHealthTracker.getLastReportedValue(), equalTo(nextHealthStatus))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(nextHealthStatus))); } /** @@ -386,12 +387,12 @@ private static class MockHealthTracker extends HealthTracker { private volatile DiskHealthInfo healthInfo = GREEN; @Override - public DiskHealthInfo checkCurrentHealth() { + protected DiskHealthInfo determineCurrentHealth() { return healthInfo; } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { builder.diskHealthInfo(healthInfo); } diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java index dd2ef861e85c3..cb503312124d9 100644 --- a/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java @@ -116,25 +116,25 @@ public void testNoDiskData() { eq(false) ) ).thenReturn(nodeStats()); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS), diskHealth); } public void testGreenDiskStatus() { simulateHealthDiskSpace(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } public void testYellowDiskStatus() { initializeIncreasedDiskSpaceUsage(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } public void testRedDiskStatus() { simulateDiskOutOfSpace(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -144,7 +144,7 @@ public void testFrozenGreenDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } @@ -154,7 +154,7 @@ public void testFrozenRedDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -165,7 +165,7 @@ public void testSearchNodeGreenDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) ); 
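The renames running through the two test files above (checkCurrentHealth becoming a protected determineCurrentHealth, addToRequestBuilder becoming protected, getLastReportedValue becoming getLastDeterminedHealth) suggest a template-method tightening: subclasses only decide what the current health is and how it is attached to the update request, while the base tracker owns polling and change detection. A hypothetical, heavily simplified sketch of that shape — not the real HealthTracker or UpdateHealthInfoCacheAction API:

// All names below are illustrative stand-ins, not the actual org.elasticsearch.health.node classes.
abstract class HealthTrackerSketch<T> {

    private volatile T lastDeterminedHealth;

    // Template method driven by the monitor on each poll: determine the value, report it on change.
    final void checkAndReport(RequestBuilderSketch builder) {
        T current = determineCurrentHealth();
        if (current != null && !current.equals(lastDeterminedHealth)) {
            addToRequestBuilder(builder, current);
            lastDeterminedHealth = current;
        }
    }

    final T getLastDeterminedHealth() {
        return lastDeterminedHealth;
    }

    // Subclass hook: compute the node-local health value.
    protected abstract T determineCurrentHealth();

    // Subclass hook: attach the value to the outgoing update request.
    protected abstract void addToRequestBuilder(RequestBuilderSketch builder, T health);
}

// Minimal stand-in for the request builder the hooks write into.
final class RequestBuilderSketch {
    private final StringBuilder parts = new StringBuilder();

    RequestBuilderSketch add(String name, Object value) {
        parts.append(name).append('=').append(value).append(';');
        return this;
    }

    @Override
    public String toString() {
        return parts.toString();
    }
}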
when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } @@ -176,7 +176,7 @@ public void testSearchNodeRedDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -187,7 +187,7 @@ public void testSearchAndIndexNodesYellowDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchAndIndexNode).localNodeId(searchAndIndexNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } @@ -204,7 +204,7 @@ public void testYellowStatusForNonDataNode() { ).copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); initializeIncreasedDiskSpaceUsage(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java index 0b5f09acc69ca..8b7a2648b0529 100644 --- a/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java @@ -45,7 +45,7 @@ public void setUp() throws Exception { public void testGetHealthNoRepos() { when(repositoriesService.getRepositories()).thenReturn(Map.of()); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertTrue(health.invalidRepositories().isEmpty()); @@ -59,7 +59,7 @@ public void testGetHealthCorrectRepo() { when(repo.getMetadata()).thenReturn(metadata); when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), repo)); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertTrue(health.invalidRepositories().isEmpty()); @@ -69,7 +69,7 @@ public void testGetHealthUnknownType() { var repo = createRepositoryMetadata(); when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), new UnknownTypeRepository(repo))); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertEquals(1, health.unknownRepositories().size()); assertEquals(repo.name(), health.unknownRepositories().get(0)); @@ -82,7 +82,7 @@ public void testGetHealthInvalid() { Map.of(repo.name(), new 
InvalidRepository(repo, new RepositoryException(repo.name(), "Test"))) ); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertEquals(1, health.invalidRepositories().size()); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 7f5c623dbae08..06d05f9dc06fa 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -918,8 +918,8 @@ protected void stopInternal() {} } } - public void testStopDoesntWaitIfGraceIsZero() { - try (var noWait = LogExpectation.unexpectWait(); var transport = new TestHttpServerTransport(Settings.EMPTY)) { + public void testStopWaitsIndefinitelyIfGraceIsZero() { + try (var wait = LogExpectation.expectWait(); var transport = new TestHttpServerTransport(Settings.EMPTY)) { TestHttpChannel httpChannel = new TestHttpChannel(); transport.serverAcceptedChannel(httpChannel); transport.incomingRequest(testHttpRequest(), httpChannel); @@ -927,7 +927,33 @@ public void testStopDoesntWaitIfGraceIsZero() { transport.doStop(); assertFalse(transport.testHttpServerChannel.isOpen()); assertFalse(httpChannel.isOpen()); - noWait.assertExpectationsMatched(); + wait.assertExpectationsMatched(); + } + } + + public void testStopLogsProgress() throws Exception { + TestHttpChannel httpChannel = new TestHttpChannel(); + var doneWithRequest = new CountDownLatch(1); + try (var wait = LogExpectation.expectUpdate(1); var transport = new TestHttpServerTransport(gracePeriod(SHORT_GRACE_PERIOD_MS))) { + + httpChannel.blockSendResponse(); + var inResponse = httpChannel.notifyInSendResponse(); + + transport.serverAcceptedChannel(httpChannel); + new Thread(() -> { + transport.incomingRequest(testHttpRequest(), httpChannel); + doneWithRequest.countDown(); + }, "testStopLogsProgress -> incomingRequest").start(); + + inResponse.await(); + + transport.doStop(); + assertFalse(transport.testHttpServerChannel.isOpen()); + assertFalse(httpChannel.isOpen()); + wait.assertExpectationsMatched(); + } finally { + httpChannel.allowSendResponse(); + doneWithRequest.await(); } } @@ -1345,20 +1371,24 @@ private LogExpectation(int grace) { } public static LogExpectation expectTimeout(int grace) { - return new LogExpectation(grace).timedOut(true).wait(true); + return new LogExpectation(grace).timedOut(true).wait(false); } public static LogExpectation unexpectedTimeout(int grace) { - return new LogExpectation(grace).timedOut(false).wait(true); + return new LogExpectation(grace).timedOut(false).wait(false); + } + + public static LogExpectation expectWait() { + return new LogExpectation(0).wait(true); } - public static LogExpectation unexpectWait() { - return new LogExpectation(0).wait(false); + public static LogExpectation expectUpdate(int connections) { + return new LogExpectation(0).update(connections); } private LogExpectation timedOut(boolean expected) { var message = "timed out while waiting [" + grace + "]ms for clients to close connections"; - var name = "message"; + var name = "timed out message"; var logger = AbstractHttpServerTransport.class.getName(); var level = Level.WARN; if (expected) { @@ -1370,18 +1400,27 @@ private LogExpectation timedOut(boolean expected) { } private LogExpectation wait(boolean expected) { - var message 
= "closing all client connections immediately"; - var name = "message"; + var message = "waiting indefinitely for clients to close connections"; + var name = "wait message"; var logger = AbstractHttpServerTransport.class.getName(); var level = Level.DEBUG; if (expected) { - appender.addExpectation(new MockLogAppender.UnseenEventExpectation(name, logger, level, message)); - } else { appender.addExpectation(new MockLogAppender.SeenEventExpectation(name, logger, level, message)); + } else { + appender.addExpectation(new MockLogAppender.UnseenEventExpectation(name, logger, level, message)); } return this; } + private LogExpectation update(int connections) { + var message = "still waiting on " + connections + " client connections to close"; + var name = "update message"; + var logger = AbstractHttpServerTransport.class.getName(); + var level = Level.INFO; + appender.addExpectation(new MockLogAppender.SeenEventExpectation(name, logger, level, message)); + return this; + } + public void assertExpectationsMatched() { appender.assertAllExpectationsMatched(); checked = true; diff --git a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java index fc29d13667d33..6118a84814462 100644 --- a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java @@ -174,7 +174,7 @@ public void testRoutingPathEqualsObjectNameError() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.o] was [object]." ) @@ -192,29 +192,21 @@ public void testRoutingPathMatchesNonDimensionKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.non_dim] was not a dimension." ) ); } - public void testRoutingPathMatchesNonKeyword() { + public void testRoutingPathMatchesNonKeyword() throws IOException { Settings s = getSettings(randomBoolean() ? "dim.non_kwd" : "dim.*"); - Exception e = expectThrows(IllegalArgumentException.class, () -> createMapperService(s, mapping(b -> { + createMapperService(s, mapping(b -> { b.startObject("dim").startObject("properties"); b.startObject("non_kwd").field("type", "integer").field("time_series_dimension", true).endObject(); b.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject(); b.endObject().endObject(); - }))); - assertThat( - e.getMessage(), - equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [dim.non_kwd] was [integer]." 
- ) - ); + })); } public void testRoutingPathMatchesScriptedKeyword() { @@ -229,7 +221,7 @@ public void testRoutingPathMatchesScriptedKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.kwd] has a [script] parameter." ) @@ -245,7 +237,7 @@ public void testRoutingPathMatchesRuntimeKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.kwd] was a runtime [keyword]." ) diff --git a/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java rename to server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java index 14e8d3344c3dc..5d9052203c5f4 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2022 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.postings; +package org.elasticsearch.index.codec; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java index ec8308404a118..f766d2148a96e 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 43628fe59daa3..683bfb19aac26 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -90,9 +90,11 @@ public > IFD getForField(String type, String field if (docValues) { fieldType = new KeywordFieldMapper.Builder(fieldName, IndexVersion.current()).build(context).fieldType(); } else { - fieldType = new TextFieldMapper.Builder(fieldName, createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + fieldType = new TextFieldMapper.Builder( + fieldName, + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); } } else if (type.equals("float")) { fieldType = new NumberFieldMapper.Builder( diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java 
b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 45ebfba265c2f..4df1961c123af 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -52,10 +52,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) - .fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0) - .build(builderContext) - .fieldType(); + MappedFieldType ft = new TextFieldMapper.Builder( + "high_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0).build(builderContext).fieldType(); IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft, MappedFieldType.FielddataOperation.SEARCH); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); @@ -67,7 +68,11 @@ public void testFilterByFrequency() throws Exception { } { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "high_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, 201, 100) .build(builderContext) .fieldType(); @@ -82,7 +87,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true);// test # docs with value - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "med_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) .fieldType(); @@ -98,7 +107,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "med_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) .fieldType(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index bf9176de1b124..8c583fe3976fa 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -156,12 +156,16 @@ public void testClearField() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.root(false, false); - final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); - final MappedFieldType mapper2 = new TextFieldMapper.Builder("field_2", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + final MappedFieldType mapper1 = new TextFieldMapper.Builder( + "field_1", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); + final MappedFieldType mapper2 = new TextFieldMapper.Builder( + "field_2", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("field_1", "thisisastring", Store.NO)); @@ -223,9 +227,11 @@ public void testFieldDataCacheListener() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.root(false, false); - final MappedFieldType mapper1 = new TextFieldMapper.Builder("s", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + final MappedFieldType mapper1 = new TextFieldMapper.Builder( + "s", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index d55eaf9df3452..0cdc9568f1fac 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -55,6 +55,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -73,8 +74,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + 
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -104,7 +105,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(1L, 0L, 1L))); + assertThat(results, containsInAnyOrder(1L, 0L, 1L)); } } } @@ -112,8 +113,8 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -128,8 +129,8 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); { @@ -185,10 +186,10 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3)); @@ -199,7 +200,7 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = 
iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -210,7 +211,7 @@ public void testRangeQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -221,8 +222,8 @@ public void testRangeQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -269,7 +270,7 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1)); @@ -282,7 +283,7 @@ public void testTermQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1)); @@ -305,7 +306,7 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(1)); @@ -315,7 +316,7 @@ public void testTermsQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try 
(DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(1)); @@ -364,7 +365,7 @@ public XContentParser parser() { while (ctx.parser().nextToken() != Token.END_ARRAY) { ootb.parse(ctx); } - iw.addDocument(ctx.doc()); + addDocument(iw, ctx.doc()); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertSameCount( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java index 25a79022c245e..09d4b62fb157c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java @@ -60,6 +60,7 @@ import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -160,8 +161,8 @@ public void testFormatDuel() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1595432181351]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -191,7 +192,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(1595518581354L, 1595518581351L, 1595518581356L))); + assertThat(results, containsInAnyOrder(1595518581354L, 1595518581351L, 1595518581356L)); } } } @@ -199,9 +200,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); DateScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -220,9 +221,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try 
(Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -300,8 +301,8 @@ private Query randomDistanceFeatureQuery(MappedFieldType ft, SearchExecutionCont @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -312,9 +313,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -394,8 +395,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher 
= newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery("2020-07-22T15:36:21.354Z", mockContext())), equalTo(1)); @@ -422,8 +423,8 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { MappedFieldType ft = simpleMappedFieldType(); IndexSearcher searcher = newSearcher(reader); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index 03716f8ad4497..9b66d0011ba69 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -20,9 +20,9 @@ public class DocumentParserContextTests extends ESTestCase { private final MapperBuilderContext root = MapperBuilderContext.root(false, false); public void testDynamicMapperSizeMultipleMappers() { - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(1, context.getNewFieldsSize()); - context.addDynamicMapper(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(2, context.getNewFieldsSize()); context.addDynamicRuntimeField(new TestRuntimeField("runtime1", "keyword")); assertEquals(3, context.getNewFieldsSize()); @@ -37,9 +37,9 @@ public void testDynamicMapperSizeSameFieldMultipleRuntimeFields() { } public void testDynamicMapperSizeSameFieldMultipleMappers() { - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(1, context.getNewFieldsSize()); - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(1, context.getNewFieldsSize()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index ed365a2460203..9547b4f9cb9a3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -45,6 +45,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class 
DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { @@ -71,8 +72,8 @@ public void testFormat() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.0]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [3.14, 1.4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.0]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [3.14, 1.4]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -102,7 +103,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(2.0, 2.4, 4.140000000000001))); + assertThat(results, containsInAnyOrder(2.0, 2.4, 4.140000000000001)); } } } @@ -110,9 +111,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -128,9 +129,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -158,8 +159,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -170,9 +171,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -195,8 +196,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); @@ -218,8 +219,8 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java index 2b8be2882c409..ce406b604ba62 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -32,7 
+33,6 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.MetadataTests.assertLeafs; import static org.elasticsearch.cluster.metadata.MetadataTests.assertMultiField; @@ -246,8 +246,23 @@ private static void assertNotFiltered(MappingMetadata mappingMetadata) { public static class FieldFilterPlugin extends Plugin implements MapperPlugin { @Override - public Function> getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.endsWith("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> false == index.equals("filtered") ? FieldPredicate.ACCEPT_ALL : new FieldPredicate() { + @Override + public boolean test(String field) { + return field.endsWith("visible"); + } + + @Override + public String modifyHash(String hash) { + return "only-visible:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java index 36f691341425c..3289e46941a45 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java @@ -44,6 +44,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class GeoPointScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { @@ -71,8 +72,8 @@ protected boolean supportsRangeQueries() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -103,7 +104,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(new GeoPoint(45.0, 45.0), new GeoPoint(0.0, 0.0)))); + assertThat(results, containsInAnyOrder(new GeoPoint(45.0, 45.0), new GeoPoint(0.0, 0.0))); } } } @@ -117,7 +118,7 @@ public void testSort() throws IOException { public void testFetch() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef(""" + addDocument(iw, List.of(new StoredField("_source", new BytesRef(""" {"foo": {"lat": 45.0, "lon" : 45.0}}""")))); try (DirectoryReader reader = iw.getReader()) { SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -138,8 +139,8 @@ public void testFetch() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new 
RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -167,8 +168,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 5eb66e631d86f..4726424ada5f2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -49,6 +49,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; @@ -75,8 +76,8 @@ public void testFormat() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.2\", \"192.168.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.2\", \"192.168.1\"]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -107,7 +108,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of("192.168.0.1", "192.168.1.1", "192.168.2.1"))); + assertThat(results, containsInAnyOrder("192.168.0.1", "192.168.1.1", "192.168.2.1")); } } } @@ -115,9 +116,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -142,9 +143,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -172,8 +173,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -184,9 +185,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -207,9 
+208,9 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); IpScriptFieldType fieldType = build("append_param", Map.of("param", ".1"), OnScriptError.FAIL); @@ -229,10 +230,10 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 542d0088f2ad0..70e375a89d5e7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -601,15 +601,6 @@ protected boolean dedupAfterFetch() { return true; } - @Override - protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { - return "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. 
[" - + mapper.name() - + "] was not a dimension."; - } - public void testDimensionInRoutingPath() throws IOException { MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("time_series_dimension", true))); IndexSettings settings = createIndexSettings( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index d8903251e6c3b..6912194625bb7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -49,6 +49,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase { @@ -66,8 +67,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -97,7 +98,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of("1-suffix", "1-suffix", "2-suffix"))); + assertThat(results, containsInAnyOrder("1-suffix", "1-suffix", "2-suffix")); } } } @@ -105,9 +106,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"d\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"d\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -123,9 +124,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aaa\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); + addDocument(iw, List.of(new 
StoredField("_source", new BytesRef("{\"foo\": [\"aaa\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -153,8 +154,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -164,11 +165,11 @@ public void testExistsQuery() throws IOException { public void testFuzzyQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); // No edits, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caat\"]}")))); // Single insertion, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cta\"]}")))); // Single transposition, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caaat\"]}")))); // Two insertions, no match - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); // Totally wrong, no match + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); // No edits, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caat\"]}")))); // Single insertion, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cta\"]}")))); // Single transposition, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caaat\"]}")))); // Two insertions, no match + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); // Totally wrong, no match try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -200,9 +201,9 @@ private Query randomFuzzyQuery(MappedFieldType ft, SearchExecutionContext ctx) { public void testPrefixQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { 
IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().prefixQuery("cat", null, mockContext())), equalTo(2)); @@ -225,9 +226,9 @@ private Query randomPrefixQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -268,9 +269,9 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testRegexpQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -294,8 +295,8 @@ private Query randomRegexpQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-suffix"), OnScriptError.FAIL); @@ -312,10 +313,10 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [3]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [3]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1", "2"), mockContext())), equalTo(2)); @@ -330,8 +331,8 @@ protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testWildcardQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().wildcardQuery("a*b", null, mockContext())), equalTo(1)); @@ -342,8 +343,8 @@ public void testWildcardQuery() throws IOException { // Normalized WildcardQueries are requested by the QueryStringQueryParser public void testNormalizedWildcardQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().normalizedWildcardQuery("a*b", null, mockContext())), equalTo(1)); @@ -365,8 +366,8 @@ private Query randomWildcardQuery(MappedFieldType ft, SearchExecutionContext ctx public void testMatchQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-Suffix"), OnScriptError.FAIL); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index debcd3c5fa911..83b3dbe858471 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -47,6 +47,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static 
org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -83,8 +84,8 @@ public void testLongFromSource() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -114,7 +115,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(2L, 2L, 3L))); + assertThat(results, containsInAnyOrder(2L, 2L, 3L)); } } } @@ -122,9 +123,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -139,9 +140,9 @@ public void testSort() throws IOException { public void testNow() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); LongScriptFieldData ifd = build("millis_ago", Map.of(), OnScriptError.FAIL).fielddataBuilder(mockFielddataContext()) @@ -164,9 +165,9 @@ public void testNow() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); - iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -194,8 +195,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -206,8 +207,8 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -228,8 +229,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); @@ -251,8 +252,8 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); 
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java new file mode 100644 index 0000000000000..01cbe496e6a3d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.script.ScriptCompiler; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +public class MultiFieldsTests extends ESTestCase { + + public void testMultiFieldsBuilderHasSyntheticSourceCompatibleKeywordField() { + var isStored = randomBoolean(); + var hasNormalizer = randomBoolean(); + + var builder = new FieldMapper.MultiFields.Builder(); + assertFalse(builder.hasSyntheticSourceCompatibleKeywordField()); + + var keywordFieldMapperBuilder = getKeywordFieldMapperBuilder(isStored, hasNormalizer); + builder.add(keywordFieldMapperBuilder); + + var expected = hasNormalizer == false; + assertEquals(expected, builder.hasSyntheticSourceCompatibleKeywordField()); + } + + public void testMultiFieldsBuilderHasSyntheticSourceCompatibleKeywordFieldDuringMerge() { + var isStored = randomBoolean(); + var hasNormalizer = randomBoolean(); + + var builder = new TextFieldMapper.Builder("text_field", createDefaultIndexAnalyzers(), false); + assertFalse(builder.multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField()); + + var keywordFieldMapperBuilder = getKeywordFieldMapperBuilder(isStored, hasNormalizer); + + var newField = new TextFieldMapper.Builder("text_field", createDefaultIndexAnalyzers(), false).addMultiField( + keywordFieldMapperBuilder + ).build(MapperBuilderContext.root(false, false)); + + builder.merge(newField, new FieldMapper.Conflicts("TextFieldMapper"), MapperMergeContext.root(false, false, Long.MAX_VALUE)); + + var expected = hasNormalizer == false; + assertEquals(expected, builder.multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField()); + } + + private KeywordFieldMapper.Builder getKeywordFieldMapperBuilder(boolean isStored, boolean hasNormalizer) { + var keywordFieldMapperBuilder = new KeywordFieldMapper.Builder( + "field", + IndexAnalyzers.of(Map.of(), Map.of("normalizer", Lucene.STANDARD_ANALYZER), Map.of()), + ScriptCompiler.NONE, + IndexVersion.current() + ); + if (isStored) { + keywordFieldMapperBuilder.stored(true); + if (randomBoolean()) { + keywordFieldMapperBuilder.docValues(false); + } + } + if (hasNormalizer) { + keywordFieldMapperBuilder.normalizer("normalizer"); + } + return keywordFieldMapperBuilder; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 40d1f2488749a..7acb89a857772 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -346,6 +346,95 @@ public void testLongRangeQueryWithDecimalParts() { ); } + public void testHalfFloatRangeQueryWithOverflowingBounds() { + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.HALF_FLOAT, randomBoolean()); + final float min_half_float = -65504; + final float max_half_float = 65504; + assertEquals( + ft.rangeQuery(min_half_float, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-1e+300, 10, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(min_half_float, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, max_half_float, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, 1e+300, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, max_half_float, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, max_half_float, false, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(min_half_float, 10, true, false, null, null, null, MOCK_CONTEXT) + ); + } + + public void testFloatRangeQueryWithOverflowingBounds() { + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.FLOAT, randomBoolean()); + + assertEquals( + ft.rangeQuery(-Float.MAX_VALUE, 10.0, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-1e+300, 10.0, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-Float.MAX_VALUE, 10.0, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10.0, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, Float.MAX_VALUE, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, 1e+300, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, Float.MAX_VALUE, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.MAX_VALUE, false, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-Float.MAX_VALUE, 10, true, false, null, null, null, MOCK_CONTEXT) + ); + } + 
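The two tests added above pin down how overflowing bounds are treated when building numeric range queries: a bound that lies outside what half_float (finite limit of ±65504) or float can represent is expected to produce the same query as a bound clamped to the type's finite range, and an exclusive overflowing bound behaves like an inclusive bound at the limit. The sketch below only illustrates that clamping idea under those assumptions; the class and method names are hypothetical and this is not the actual NumberFieldMapper code.

// Hypothetical sketch of the bound clamping the assertions above rely on; not production code.
public class HalfFloatBoundClampSketch {
    // Largest finite magnitude representable by a half_float (IEEE 754 binary16).
    static final float HALF_FLOAT_MAX = 65504f;

    // A lower bound below the representable range collapses to the minimum, so
    // rangeQuery(-1e+300, x) and rangeQuery(NEGATIVE_INFINITY, x) match rangeQuery(-65504, x).
    static float clampLower(double lower) {
        return lower <= -HALF_FLOAT_MAX ? -HALF_FLOAT_MAX : (float) lower;
    }

    // An upper bound above the representable range collapses to the maximum.
    static float clampUpper(double upper) {
        return upper >= HALF_FLOAT_MAX ? HALF_FLOAT_MAX : (float) upper;
    }

    public static void main(String[] args) {
        System.out.println(clampLower(-1e300));                   // -65504.0
        System.out.println(clampLower(Double.NEGATIVE_INFINITY)); // -65504.0
        System.out.println(clampUpper(1e300));                    // 65504.0
        System.out.println(clampUpper(42.5));                     // 42.5
    }
}

The float variant of the test follows the same pattern with Float.MAX_VALUE as the limit, and the inclusive/exclusive combinations assert that overflow clamping and bound inclusiveness interact consistently.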
public void testRangeQuery() { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); Query expected = new IndexOrDocValuesQuery( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index e024f2fa7b1ea..3c4aca4d36284 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -27,10 +27,10 @@ private RootObjectMapper createMapping( rootBuilder.add(new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE).enabled(disabledFieldEnabled)); ObjectMapper.Builder fooBuilder = new ObjectMapper.Builder("foo", Explicit.IMPLICIT_TRUE).enabled(fooFieldEnabled); if (includeBarField) { - fooBuilder.add(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers())); + fooBuilder.add(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers(), false)); } if (includeBazField) { - fooBuilder.add(new TextFieldMapper.Builder("baz", createDefaultIndexAnalyzers())); + fooBuilder.add(new TextFieldMapper.Builder("baz", createDefaultIndexAnalyzers(), false)); } rootBuilder.add(fooBuilder); return rootBuilder.build(MapperBuilderContext.root(false, false)); @@ -366,7 +366,7 @@ private TextFieldMapper.Builder createTextKeywordMultiField(String name) { } private TextFieldMapper.Builder createTextKeywordMultiField(String name, String multiFieldName) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers(), false); builder.multiFieldsBuilder.add(new KeywordFieldMapper.Builder(multiFieldName, IndexVersion.current())); return builder; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 6472f09ce1be7..74b293ca7d6d6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -530,11 +530,11 @@ public void testSyntheticSourceDocValuesFieldWithout() throws IOException { public void testNestedObjectWithMultiFieldsgetTotalFieldsCount() { ObjectMapper.Builder mapperBuilder = new ObjectMapper.Builder("parent_size_1", Explicit.IMPLICIT_TRUE).add( new ObjectMapper.Builder("child_size_2", Explicit.IMPLICIT_TRUE).add( - new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers()).addMultiField( + new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers(), false).addMultiField( new KeywordFieldMapper.Builder("multi_field_size_4", IndexVersion.current()) ) .addMultiField( - new TextFieldMapper.Builder("grand_child_size_5", createDefaultIndexAnalyzers()).addMultiField( + new TextFieldMapper.Builder("grand_child_size_5", createDefaultIndexAnalyzers(), false).addMultiField( new KeywordFieldMapper.Builder("multi_field_of_multi_field_size_6", IndexVersion.current()) ) ) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index 3e90459746a19..7a7f1668b4636 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -391,6 +391,100 @@ public void testPassThroughObjectNested() throws IOException { assertThat(mapperService.mappingLookup().getMapper("attributes.another.dim"), instanceOf(KeywordFieldMapper.class)); } + public void testPassThroughObjectNestedWithDuplicateNames() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("resource").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("dim").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimA").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimB").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("another.dim").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimC").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimD").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + })); + + assertThat(mapperService.mappingLookup().getMapper("dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("resource.attributes.dim"), instanceOf(KeywordFieldMapper.class)); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimA"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("resource.attributes.more.attributes.another.dimA"), + instanceOf(KeywordFieldMapper.class) + ); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimB"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("resource.attributes.more.attributes.another.dimB"), + instanceOf(KeywordFieldMapper.class) + ); + + assertThat(mapperService.mappingLookup().objectMappers().get("another").getMapper("dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("attributes.another.dim"), instanceOf(KeywordFieldMapper.class)); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimC"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("attributes.more.attributes.another.dimC"), + instanceOf(KeywordFieldMapper.class) + ); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimD"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("attributes.more.attributes.another.dimD"), + instanceOf(KeywordFieldMapper.class) + ); + } + + public void testPassThroughObjectNestedWithConflictingNames() throws IOException { + MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> { + b.startObject("resource").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("dim").field("type", "keyword").endObject(); + b.startObject("resource.attributes.another.dim").field("type", "keyword").endObject(); + b.endObject(); 
+ } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + }))); + assertEquals( + "Failed to parse mapping: Conflicting objects created during alias generation for pass-through field: [resource]", + e.getMessage() + ); + } + public void testAliasMappersCreatesAlias() throws Exception { var context = MapperBuilderContext.root(false, false); Map aliases = new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( @@ -445,6 +539,7 @@ public void testAliasMappersExitsInDeepNesting() throws Exception { var context = MapperBuilderContext.root(false, false); Map aliases = new HashMap<>(); var objectIntermediates = new HashMap(1); + var objectIntermediatesFullPath = new HashMap(1); new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( Map.of( "labels", @@ -457,8 +552,10 @@ public void testAliasMappersExitsInDeepNesting() throws Exception { Explicit.EXPLICIT_FALSE ) ), + Map.of(), aliases, objectIntermediates, + objectIntermediatesFullPath, context, 1_000_000 ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java index 8cb3ecef4c35c..def8841045746 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisMode; @@ -67,6 +68,9 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { fieldNode.put("analyzer", "my_analyzer"); MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), Settings.EMPTY) + ); // check AnalysisMode.ALL works Map analyzers = defaultAnalyzers(); @@ -102,6 +106,12 @@ public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { } MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings( + IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), + Settings.EMPTY + ) + ); // check AnalysisMode.ALL and AnalysisMode.SEARCH_TIME works Map analyzers = defaultAnalyzers(); @@ -143,6 +153,9 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { fieldNode.put("analyzer", "my_analyzer"); MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), Settings.EMPTY) + ); // check that "analyzer" set to AnalysisMode.INDEX_TIME is blocked if there is no search analyzer AnalysisMode mode = AnalysisMode.INDEX_TIME; diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index f92867d1ce461..1c5ae3baca827 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -44,9 +44,11 @@ import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; @@ -249,6 +251,64 @@ public void testDefaults() throws IOException { assertEquals(DocValuesType.NONE, fieldType.docValuesType()); } + public void testStoreParameterDefaults() throws IOException { + var timeSeriesIndexMode = randomBoolean(); + var isStored = randomBoolean(); + var hasKeywordFieldForSyntheticSource = randomBoolean(); + + var indexSettingsBuilder = getIndexSettingsBuilder(); + if (timeSeriesIndexMode) { + indexSettingsBuilder.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) + .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dimension") + .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2000-01-08T23:40:53.384Z") + .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z"); + } + var indexSettings = indexSettingsBuilder.build(); + + var mapping = mapping(b -> { + b.startObject("field"); + b.field("type", "text"); + if (isStored) { + b.field("store", isStored); + } + if (hasKeywordFieldForSyntheticSource) { + b.startObject("fields"); + b.startObject("keyword"); + b.field("type", "keyword"); + b.endObject(); + b.endObject(); + } + b.endObject(); + + if (timeSeriesIndexMode) { + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + b.startObject("dimension"); + b.field("type", "keyword"); + b.field("time_series_dimension", "true"); + b.endObject(); + } + }); + DocumentMapper mapper = createMapperService(getVersion(), indexSettings, () -> true, mapping).documentMapper(); + + var source = source(TimeSeriesRoutingHashFieldMapper.DUMMY_ENCODED_VALUE, b -> { + b.field("field", "1234"); + if (timeSeriesIndexMode) { + b.field("@timestamp", randomMillisUpToYear9999()); + b.field("dimension", "dimension1"); + } + }, null); + ParsedDocument doc = mapper.parse(source); + List fields = doc.rootDoc().getFields("field"); + IndexableFieldType fieldType = fields.get(0).fieldType(); + if (isStored || (timeSeriesIndexMode && hasKeywordFieldForSyntheticSource == false)) { + assertTrue(fieldType.stored()); + } else { + assertFalse(fieldType.stored()); + } + } + public void testBWCSerialization() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { b.field("type", "text"); @@ -1138,7 +1198,8 @@ public SyntheticSourceExample example(int maxValues) { delegate.expectedForSyntheticSource(), delegate.expectedForBlockLoader(), b -> { - b.field("type", "text").field("store", true); + b.field("type", "text"); + b.field("store", true); if (indexText == false) { b.field("index", false); } @@ -1196,6 +1257,17 @@ public List invalidExample() throws IOException { b.endObject(); } b.endObject(); + 
}), + new SyntheticSourceInvalidExample(err, b -> { + b.field("type", "text"); + b.startObject("fields"); + { + b.startObject("kwd"); + b.field("type", "keyword"); + b.field("doc_values", "false"); + b.endObject(); + } + b.endObject(); }) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java index 50abb47e51125..87b107d5bd139 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java @@ -720,38 +720,4 @@ public void testParseWithDynamicMappingInvalidRoutingHash() { }); assertThat(failure.getMessage(), equalTo("[5:1] failed to parse: Illegal base64 character 20")); } - - public void testParseWithDynamicMappingNullId() { - Settings indexSettings = Settings.builder() - .put(IndexSettings.MODE.getKey(), "time_series") - .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim") - .build(); - MapperService mapper = createMapperService(IndexVersion.current(), indexSettings, () -> false); - SourceToParse source = new SourceToParse(null, new BytesArray(""" - { - "@timestamp": 1609459200000, - "dim": "6a841a21", - "value": 100 - }"""), XContentType.JSON); - var failure = expectThrows(DocumentParsingException.class, () -> { - IndexShard.prepareIndex( - mapper, - source, - UNASSIGNED_SEQ_NO, - randomNonNegativeLong(), - Versions.MATCH_ANY, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - -1, - false, - UNASSIGNED_SEQ_NO, - 0, - System.nanoTime() - ); - }); - assertThat( - failure.getMessage(), - equalTo("[5:1] failed to parse: _ts_routing_hash was null but must be set because index [index] is in time_series mode") - ); - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java index df5ff9a8fe7e5..5352bd446a80b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java @@ -59,6 +59,15 @@ private static ParsedDocument parseDocument(int hash, DocumentMapper docMapper, }, TimeSeriesRoutingHashFieldMapper.encode(hash))); } + private static ParsedDocument parseDocument(String id, DocumentMapper docMapper, CheckedConsumer f) + throws IOException { + // Add the @timestamp field required by DataStreamTimestampFieldMapper for all time series indices + return docMapper.parse(source(id, b -> { + f.accept(b); + b.field("@timestamp", "2021-10-01"); + }, null)); + } + private static int getRoutingHash(ParsedDocument document) { BytesRef value = document.rootDoc().getBinaryValue(TimeSeriesRoutingHashFieldMapper.NAME); return TimeSeriesRoutingHashFieldMapper.decode(Uid.decodeId(value.bytes)); @@ -76,6 +85,17 @@ public void testEnabledInTimeSeriesMode() throws Exception { assertEquals(hash, getRoutingHash(doc)); } + public void testRetrievedFromIdInTimeSeriesMode() throws Exception { + DocumentMapper docMapper = createMapper(mapping(b -> { + b.startObject("a").field("type", "keyword").field("time_series_dimension", true).endObject(); + })); + + int hash = randomInt(); + ParsedDocument doc = parseDocument(TimeSeriesRoutingHashFieldMapper.DUMMY_ENCODED_VALUE, docMapper, b -> b.field("a", "value")); + 
assertThat(doc.rootDoc().getField("a").binaryValue(), equalTo(new BytesRef("value"))); + assertEquals(0, getRoutingHash(doc)); + } + public void testDisabledInStandardMode() throws Exception { DocumentMapper docMapper = createMapperService( getIndexSettingsBuilder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()).build(), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java index e4ea78f3b7a0e..4f23c86f53cca 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java @@ -467,9 +467,9 @@ public void testMissingDimensionInRoutingPath() throws IOException { ); Exception ex = expectThrows(IllegalArgumentException.class, () -> mapper.documentMapper().validate(settings, false)); assertEquals( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [field.key3] was [flattened].", + + "without the [script] parameter. [field._keyed] was not a dimension.", ex.getMessage() ); } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 98096a49443a9..d7a1f70333ad8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -106,6 +106,7 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, .or(instanceOf(ConstantScoreQuery.class)) .or(instanceOf(MatchNoDocsQuery.class)) ); + // if (true) throw new IllegalArgumentException(randomTerms.toString()); if (query instanceof ConstantScoreQuery) { assertThat(((ConstantScoreQuery) query).getQuery(), instanceOf(BooleanQuery.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java index f8162eb987226..48432a0ff4958 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java @@ -51,7 +51,7 @@ public class BulkByScrollResponseTests extends AbstractXContentTestCase { @@ -177,7 +184,142 @@ protected BulkByScrollTask.Status createTestInstance() { @Override protected BulkByScrollTask.Status doParseInstance(XContentParser parser) throws IOException { - return BulkByScrollTask.Status.fromXContent(parser); + XContentParser.Token token; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + token = parser.nextToken(); + } else { + token = parser.nextToken(); + } + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + return innerParseStatus(parser); + } + + private static final ConstructingObjectParser, Void> RETRIES_PARSER = new ConstructingObjectParser<>( + "bulk_by_scroll_task_status_retries", + true, + a -> new Tuple<>(((Long) a[0]), (Long) a[1]) + ); + static { + 
RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_BULK_FIELD)); + RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_SEARCH_FIELD)); + } + + public static void declareFields(ObjectParser parser) { + parser.declareInt(BulkByScrollTask.StatusBuilder::setSliceId, new ParseField(BulkByScrollTask.Status.SLICE_ID_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setTotal, new ParseField(BulkByScrollTask.Status.TOTAL_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setUpdated, new ParseField(BulkByScrollTask.Status.UPDATED_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setCreated, new ParseField(BulkByScrollTask.Status.CREATED_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setDeleted, new ParseField(BulkByScrollTask.Status.DELETED_FIELD)); + parser.declareInt(BulkByScrollTask.StatusBuilder::setBatches, new ParseField(BulkByScrollTask.Status.BATCHES_FIELD)); + parser.declareLong( + BulkByScrollTask.StatusBuilder::setVersionConflicts, + new ParseField(BulkByScrollTask.Status.VERSION_CONFLICTS_FIELD) + ); + parser.declareLong(BulkByScrollTask.StatusBuilder::setNoops, new ParseField(BulkByScrollTask.Status.NOOPS_FIELD)); + parser.declareObject( + BulkByScrollTask.StatusBuilder::setRetries, + RETRIES_PARSER, + new ParseField(BulkByScrollTask.Status.RETRIES_FIELD) + ); + parser.declareLong(BulkByScrollTask.StatusBuilder::setThrottled, new ParseField(BulkByScrollTask.Status.THROTTLED_RAW_FIELD)); + parser.declareFloat( + BulkByScrollTask.StatusBuilder::setRequestsPerSecond, + new ParseField(BulkByScrollTask.Status.REQUESTS_PER_SEC_FIELD) + ); + parser.declareString(BulkByScrollTask.StatusBuilder::setReasonCancelled, new ParseField(BulkByScrollTask.Status.CANCELED_FIELD)); + parser.declareLong( + BulkByScrollTask.StatusBuilder::setThrottledUntil, + new ParseField(BulkByScrollTask.Status.THROTTLED_UNTIL_RAW_FIELD) + ); + parser.declareObjectArray( + BulkByScrollTask.StatusBuilder::setSliceStatuses, + (p, c) -> parseStatusOrException(p), + new ParseField(BulkByScrollTask.Status.SLICES_FIELD) + ); + } + + private static Status innerParseStatus(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + String fieldName = parser.currentName(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + BulkByScrollTask.StatusBuilder builder = new BulkByScrollTask.StatusBuilder(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (fieldName.equals(Status.RETRIES_FIELD)) { + builder.setRetries(RETRIES_PARSER.parse(parser, null)); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (fieldName.equals(Status.SLICES_FIELD)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + builder.addToSliceStatuses(parseStatusOrException(parser)); + } + } else { + parser.skipChildren(); + } + } else { // else if it is a value + switch (fieldName) { + case Status.SLICE_ID_FIELD -> builder.setSliceId(parser.intValue()); + case Status.TOTAL_FIELD -> builder.setTotal(parser.longValue()); + case Status.UPDATED_FIELD -> builder.setUpdated(parser.longValue()); + case Status.CREATED_FIELD -> builder.setCreated(parser.longValue()); + case Status.DELETED_FIELD -> 
builder.setDeleted(parser.longValue()); + case Status.BATCHES_FIELD -> builder.setBatches(parser.intValue()); + case Status.VERSION_CONFLICTS_FIELD -> builder.setVersionConflicts(parser.longValue()); + case Status.NOOPS_FIELD -> builder.setNoops(parser.longValue()); + case Status.THROTTLED_RAW_FIELD -> builder.setThrottled(parser.longValue()); + case Status.REQUESTS_PER_SEC_FIELD -> builder.setRequestsPerSecond(parser.floatValue()); + case Status.CANCELED_FIELD -> builder.setReasonCancelled(parser.text()); + case Status.THROTTLED_UNTIL_RAW_FIELD -> builder.setThrottledUntil(parser.longValue()); + } + } + } + return builder.buildStatus(); + } + + /** + * Since {@link BulkByScrollTask.StatusOrException} can contain either an {@link Exception} or a {@link Status} we need to peek + * at a field first before deciding what needs to be parsed since the same object could contain either. + * The {@link BulkByScrollTask.StatusOrException#EXPECTED_EXCEPTION_FIELDS} contains the fields that are expected when the serialized + * object was an instance of exception and the {@link Status#FIELDS_SET} is the set of fields expected when the + * serialized object was an instance of Status. + */ + public static BulkByScrollTask.StatusOrException parseStatusOrException(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } + if (token == XContentParser.Token.VALUE_NULL) { + return null; + } else { + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + token = parser.nextToken(); + // This loop is present only to ignore unknown tokens. It breaks as soon as we find a field + // that is allowed. + while (token != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String fieldName = parser.currentName(); + // weird way to ignore unknown tokens + if (Status.FIELDS_SET.contains(fieldName)) { + return new BulkByScrollTask.StatusOrException(innerParseStatus(parser)); + } else if (BulkByScrollTask.StatusOrException.EXPECTED_EXCEPTION_FIELDS.contains(fieldName)) { + return new BulkByScrollTask.StatusOrException(ElasticsearchException.innerFromXContent(parser, false)); + } else { + // Ignore unknown tokens + token = parser.nextToken(); + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + token = parser.nextToken(); + } + } + throw new XContentParseException("Unable to parse StatusFromException. 
Expected fields not found."); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index cade1e66c7fc7..0216bad7cf7a3 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesRoutingHashFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -44,11 +45,11 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; import static org.elasticsearch.test.LambdaMatchers.falseWith; import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; @@ -246,24 +247,24 @@ public void testGetFieldFilter() { List mapperPlugins = List.of(new MapperPlugin() { }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("hidden_index") ? field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("hidden_index") ? HIDDEN_INDEX : FieldPredicate.ACCEPT_ALL; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> field -> field.equals("hidden_field") == false; + public Function getFieldFilter() { + return index -> HIDDEN_FIELD; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("filtered") ? 
ONLY_VISIBLE : FieldPredicate.ACCEPT_ALL; } }); IndicesModule indicesModule = new IndicesModule(mapperPlugins); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - Function> fieldFilter = mapperRegistry.getFieldFilter(); + Function fieldFilter = mapperRegistry.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); assertThat(fieldFilter.apply("hidden_index"), falseWith(randomAlphaOfLengthBetween(3, 5))); @@ -276,6 +277,10 @@ public Function> getFieldFilter() { assertThat(fieldFilter.apply("hidden_index"), falseWith("visible")); assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)), trueWith("visible")); assertThat(fieldFilter.apply("hidden_index"), falseWith("hidden_field")); + + assertThat(fieldFilter.apply("filtered").modifyHash("hash"), equalTo("only-visible:hide-field:hash")); + assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).modifyHash("hash"), equalTo("hide-field:hash")); + assertThat(fieldFilter.apply("hidden_index").modifyHash("hash"), equalTo("hide-field:hidden:hash")); } public void testDefaultFieldFilterIsNoOp() { @@ -286,7 +291,7 @@ public void testDefaultFieldFilterIsNoOp() { }); } IndicesModule indicesModule = new IndicesModule(mapperPlugins); - Function> fieldFilter = indicesModule.getMapperRegistry().getFieldFilter(); + Function fieldFilter = indicesModule.getMapperRegistry().getFieldFilter(); assertSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); } @@ -294,21 +299,72 @@ public void testNoOpFieldPredicate() { List mapperPlugins = Arrays.asList(new MapperPlugin() { }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("hidden_index") ? field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("hidden_index") ? HIDDEN_INDEX : FieldPredicate.ACCEPT_ALL; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("filtered") ? 
ONLY_VISIBLE : FieldPredicate.ACCEPT_ALL; } }); IndicesModule indicesModule = new IndicesModule(mapperPlugins); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - Function> fieldFilter = mapperRegistry.getFieldFilter(); - assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 7))); - assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("hidden_index")); - assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("filtered")); + Function fieldFilter = mapperRegistry.getFieldFilter(); + assertSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply(randomAlphaOfLengthBetween(3, 7))); + assertNotSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("hidden_index")); + assertNotSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("filtered")); } + + private static final FieldPredicate HIDDEN_INDEX = new FieldPredicate() { + @Override + public boolean test(String field) { + return false; + } + + @Override + public String modifyHash(String hash) { + return "hidden:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; + + private static final FieldPredicate HIDDEN_FIELD = new FieldPredicate() { + @Override + public boolean test(String field) { + return false == field.equals("hidden_field"); + } + + @Override + public String modifyHash(String hash) { + return "hide-field:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; + + private static final FieldPredicate ONLY_VISIBLE = new FieldPredicate() { + @Override + public boolean test(String field) { + return field.equals("visible"); + } + + @Override + public String modifyHash(String hash) { + return "only-visible:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java index aa749c5dffe5f..c4e708320946f 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java @@ -31,7 +31,10 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptySet; @@ -589,4 +592,35 @@ public void testFileHashCodeAndEquals() { } } } + + public void testConcurrentlyAddRecoveredFromSnapshotBytes() { + var index = new RecoveryState.Index(); + int numIndices = randomIntBetween(1, 4); + for (int i = 0; i < numIndices; i++) { + index.addFileDetail("foo_" + i, randomIntBetween(1, 100), false); + } + + var executor = Executors.newFixedThreadPool(randomIntBetween(2, 8)); + try { + int count = randomIntBetween(1000, 10_000); + var latch = new CountDownLatch(count); + var recoveredBytes = new AtomicLong(); + for (int i = 0; i < count; i++) { + String indexName = "foo_" + (i % numIndices); + executor.submit(() -> { + int bytes = randomIntBetween(1, 1000); + // This is safe because the whole addRecoveredFromSnapshotBytesToFile method is synchronized + index.addRecoveredFromSnapshotBytesToFile(indexName, bytes); + // This fails because only getFileDetails is synchronized + // index.getFileDetails(indexName).addRecoveredFromSnapshotBytes(bytes); + 
recoveredBytes.addAndGet(bytes); + latch.countDown(); + }); + } + safeAwait(latch); + assertEquals(recoveredBytes.get(), index.recoveredFromSnapshotBytes()); + } finally { + executor.shutdownNow(); + } + } } diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java index c1ecaf12828d3..10db924f25f4b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java @@ -21,7 +21,7 @@ public class CustomPassageFormatterTests extends ESTestCase { public void testSimpleFormat() { String content = "This is a really cool highlighter. Unified highlighter gives nice snippets back. No matches here."; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder()); + CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder(), 3); Passage[] passages = new Passage[3]; String match = "highlighter"; @@ -62,7 +62,7 @@ public void testSimpleFormat() { public void testHtmlEncodeFormat() { String content = "This is a really cool highlighter. Unified highlighter gives nice snippets back."; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new SimpleHTMLEncoder()); + CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new SimpleHTMLEncoder(), 3); Passage[] passages = new Passage[2]; String match = "highlighter"; diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index bf249ba4409ab..8412cc241f51a 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -145,7 +145,7 @@ private void assertHighlightOneDoc( UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); builder.withFieldMatcher(name -> "text".equals(name)); - builder.withFormatter(new CustomPassageFormatter("", "", new DefaultEncoder())); + builder.withFormatter(new CustomPassageFormatter("", "", new DefaultEncoder(), 3)); CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( builder, offsetSource, diff --git a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java index e13c0c135c5e9..1970662782976 100644 --- a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java @@ -10,8 +10,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; -import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.SuggestTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -61,12 +60,12 @@ public List getNamedXContentParsers() { new NamedXContentRegistry.Entry( 
Suggest.Suggestion.class, new ParseField("phrase_aggregation"), - (parser, context) -> PhraseSuggestion.fromXContent(parser, (String) context) + (parser, context) -> SuggestTests.parsePhraseSuggestion(parser, (String) context) ), new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("test_suggestion"), - (parser, context) -> TermSuggestion.fromXContent(parser, (String) context) + (parser, context) -> SuggestTests.parseTermSuggestion(parser, (String) context) ) ); } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index 8bca9481529d9..04f3b998f8375 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -48,7 +48,7 @@ public void testFromXContent() throws IOException { } builder = nestedIdentity.innerToXContent(builder, ToXContent.EMPTY_PARAMS); try (XContentParser parser = createParser(builder)) { - NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); + NestedIdentity parsedNestedIdentity = SearchResponseUtils.parseNestedIdentity(parser); assertEquals(nestedIdentity, parsedNestedIdentity); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 40bdc3da37242..0eefa171f7c08 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -166,7 +166,7 @@ public void testFromXContent() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -201,7 +201,7 @@ public void testFromXContentLenientParsing() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -219,7 +219,7 @@ public void testFromXContentWithoutTypeAndId() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(JsonXContent.jsonXContent, hit)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -341,7 +341,7 @@ public void testWeirdScriptFields() throws Exception { "result": [null] } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, fields.size()); DocumentField result = fields.get("result"); @@ -360,7 +360,7 @@ public void testWeirdScriptFields() throws Exception { } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, fields.size()); 
DocumentField result = fields.get("result"); @@ -384,7 +384,7 @@ public void testWeirdScriptFields() throws Exception { } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, fields.size()); DocumentField result = fields.get("result"); @@ -410,7 +410,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -431,7 +431,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -448,7 +448,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 4ca3c5b8dd46e..0d75358768dab 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -225,7 +225,7 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException { assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals(SearchHits.Fields.HITS, parser.currentName()); assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - SearchHits searchHits = SearchHits.fromXContent(parser); + SearchHits searchHits = SearchResponseUtils.parseSearchHits(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); try { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java new file mode 100644 index 0000000000000..f9e0fcf114cdc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperServiceTestCase; +import org.elasticsearch.search.lookup.FieldLookup; +import org.elasticsearch.search.lookup.LeafFieldLookupProvider; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.List; + +public class PopulateFieldLookupTests extends MapperServiceTestCase { + public void testPopulateFieldLookup() throws IOException { + final XContentBuilder mapping = createMapping(); + final MapperService mapperService = createMapperService(mapping); + withLuceneIndex(mapperService, iw -> { + final Document doc = new Document(); + doc.add(new StoredField("integer", 101)); + doc.add(new StoredField("keyword", new BytesRef("foobar"))); + iw.addDocument(doc); + }, reader -> { + final StoredFields storedFields = reader.storedFields(); + final Document document = storedFields.document(0); + final List documentFields = document.getFields().stream().map(IndexableField::name).toList(); + assertThat(documentFields, Matchers.containsInAnyOrder("integer", "keyword")); + + final IndexSearcher searcher = newSearcher(reader); + final LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0); + final LeafFieldLookupProvider provider = LeafFieldLookupProvider.fromStoredFields().apply(readerContext); + final FieldLookup integerFieldLookup = new FieldLookup(mapperService.fieldType("integer")); + final FieldLookup keywordFieldLookup = new FieldLookup(mapperService.fieldType("keyword")); + provider.populateFieldLookup(integerFieldLookup, 0); + provider.populateFieldLookup(keywordFieldLookup, 0); + assertEquals(List.of(101), integerFieldLookup.getValues()); + assertEquals(List.of("foobar"), keywordFieldLookup.getValues()); + }); + } + + private static XContentBuilder createMapping() throws IOException { + final XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc"); + { + mapping.startObject("properties"); + { + mapping.startObject("integer"); + { + mapping.field("type", "integer").field("store", "true"); + } + mapping.endObject(); + mapping.startObject("keyword"); + { + mapping.field("type", "keyword").field("store", "true"); + } + mapping.endObject(); + } + mapping.endObject(); + + } + return mapping.endObject().endObject(); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index efa2456e31af5..4e4f5c9c0ddfa 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -317,7 +317,11 @@ public void testBuildSearchContextHighlight() throws IOException { ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder 
builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; @@ -610,8 +614,8 @@ public static HighlightBuilder randomHighlighterBuilder() { private static void setRandomCommonOptions(AbstractHighlighterBuilder highlightBuilder) { if (randomBoolean()) { // need to set this together, otherwise parsing will complain - highlightBuilder.preTags(randomStringArray(0, 3)); - highlightBuilder.postTags(randomStringArray(0, 3)); + highlightBuilder.preTags(randomStringArray(1, 3)); + highlightBuilder.postTags(randomStringArray(1, 3)); } if (randomBoolean()) { highlightBuilder.fragmentSize(randomIntBetween(0, 100)); diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java index c89edb29b5058..4855a043c565a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.profile; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.profile.query.QueryProfileShardResultTests; @@ -48,7 +49,7 @@ protected Reader instanceReader() { @Override protected SearchProfileDfsPhaseResult doParseInstance(XContentParser parser) throws IOException { - return SearchProfileDfsPhaseResult.fromXContent(parser); + return SearchResponseUtils.parseProfileDfsPhaseResult(parser); } public void testCombineQueryProfileShardResults() { diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java index f02114a48991c..bda74e75de88c 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -114,7 +115,7 @@ protected SearchProfileResults doParseInstance(XContentParser parser) throws IOE ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - SearchProfileResults result = SearchProfileResults.fromXContent(parser); + SearchProfileResults result = SearchResponseUtils.parseSearchProfileResults(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); return result; diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java 
b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java index f28425172ead5..56520c0c6d033 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.profile.query; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileResultTests; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -51,7 +52,7 @@ protected QueryProfileShardResult mutateInstance(QueryProfileShardResult instanc @Override protected QueryProfileShardResult doParseInstance(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - QueryProfileShardResult result = QueryProfileShardResult.fromXContent(parser); + QueryProfileShardResult result = SearchResponseUtils.parseQueryProfileShardResult(parser); ensureExpectedToken(null, parser.nextToken(), parser); return result; } diff --git a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java index c7967f0de5411..c63e2499147c1 100644 --- a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java @@ -118,6 +118,7 @@ public void testHitCountFromWeightDoesNotEarlyTerminate() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106647") public void testCollectedHitCount() throws Exception { Query query = new NonCountingTermQuery(new Term("string", "a1")); int threshold = randomIntBetween(1, 10000); diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 0ade522ae1ffa..7113117a4d7fa 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -160,7 +160,11 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; @@ -222,7 +226,11 @@ public void testRewritingKeepsSettings() throws IOException { ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; diff --git a/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java 
b/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java index b6530e3c1c6bd..e6c34bb89fd8b 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java @@ -10,13 +10,17 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.geo.GeoTestUtil; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -79,9 +83,13 @@ protected GeoPointScriptFieldDistanceFeatureQuery mutate(GeoPointScriptFieldDist @Override public void testMatches() throws IOException { - try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(random(), new MockAnalyzer(random())); + // Use LogDocMergePolicy to avoid randomization issues with the doc retrieval order. + config.setMergePolicy(new LogDocMergePolicy()); + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory, config)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [34, 6]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [-3.56, -45.98]}")))); + try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields()); diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java index 86486cac893cf..3ded47b6d2671 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; @@ -68,18 +69,19 @@ protected StringScriptFieldFuzzyQuery mutate(StringScriptFieldFuzzyQuery orig) { @Override public void testMatches() { StringScriptFieldFuzzyQuery query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 1, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertTrue(query.matches(List.of("foa"))); - assertTrue(query.matches(List.of("foo", "bar"))); - assertFalse(query.matches(List.of("bar"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("foo"), scratch)); + assertTrue(query.matches(List.of("foa"), scratch)); + assertTrue(query.matches(List.of("foo", "bar"), scratch)); + 
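Editorial aside: the updated matches(...) signature in these script-field query tests threads a reusable BytesRefBuilder "scratch" buffer through the string-to-BytesRef conversion, so repeated candidate values no longer allocate a fresh BytesRef each time. A minimal, self-contained sketch of that reuse pattern follows; the anyMatches helper and its equality check merely stand in for the real ByteRunAutomaton matching and are not part of the Elasticsearch API.

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

import java.util.List;

public class ScratchBufferSketch {
    // Hypothetical helper mirroring the reuse pattern: one BytesRefBuilder is passed in,
    // and every value overwrites the same backing byte[] instead of allocating per value.
    static boolean anyMatches(List<String> values, BytesRef target, BytesRefBuilder scratch) {
        for (String value : values) {
            scratch.copyChars(value);                 // reuse the scratch buffer for the UTF-8 bytes
            if (scratch.get().bytesEquals(target)) {  // stand-in for the automaton check
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        BytesRefBuilder scratch = new BytesRefBuilder();
        BytesRef target = new BytesRef("foo");
        System.out.println(anyMatches(List.of("foo", "bar"), target, scratch)); // true
        System.out.println(anyMatches(List.of("bar"), target, scratch));        // false
    }
}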
assertFalse(query.matches(List.of("bar"), scratch)); query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 0, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertFalse(query.matches(List.of("foa"))); + assertTrue(query.matches(List.of("foo"), scratch)); + assertFalse(query.matches(List.of("foa"), scratch)); query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 2, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertTrue(query.matches(List.of("foa"))); - assertTrue(query.matches(List.of("faa"))); - assertFalse(query.matches(List.of("faaa"))); + assertTrue(query.matches(List.of("foo"), scratch)); + assertTrue(query.matches(List.of("foa"), scratch)); + assertTrue(query.matches(List.of("faa"), scratch)); + assertFalse(query.matches(List.of("faaa"), scratch)); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java index 50c6786de1282..46f841c344e5f 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; @@ -84,13 +85,14 @@ public void testMatches() { 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); - assertTrue(query.matches(List.of("astuffb"))); - assertFalse(query.matches(List.of("astuffB"))); - assertFalse(query.matches(List.of("fffff"))); - assertFalse(query.matches(List.of("ab"))); - assertFalse(query.matches(List.of("aasdf"))); - assertFalse(query.matches(List.of("dsfb"))); - assertTrue(query.matches(List.of("astuffb", "fffff"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("astuffb"), scratch)); + assertFalse(query.matches(List.of("astuffB"), scratch)); + assertFalse(query.matches(List.of("fffff"), scratch)); + assertFalse(query.matches(List.of("ab"), scratch)); + assertFalse(query.matches(List.of("aasdf"), scratch)); + assertFalse(query.matches(List.of("dsfb"), scratch)); + assertTrue(query.matches(List.of("astuffb", "fffff"), scratch)); StringScriptFieldRegexpQuery ciQuery = new StringScriptFieldRegexpQuery( randomScript(), @@ -101,9 +103,8 @@ public void testMatches() { RegExp.ASCII_CASE_INSENSITIVE, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); - assertTrue(ciQuery.matches(List.of("astuffB"))); - assertTrue(ciQuery.matches(List.of("Astuffb", "fffff"))); - + assertTrue(ciQuery.matches(List.of("astuffB"), scratch)); + assertTrue(ciQuery.matches(List.of("Astuffb", "fffff"), scratch)); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java index 37e24553f9fce..f6cd59f4254ad 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import 
org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; @@ -52,18 +53,19 @@ protected StringScriptFieldWildcardQuery mutate(StringScriptFieldWildcardQuery o @Override public void testMatches() { StringScriptFieldWildcardQuery query = new StringScriptFieldWildcardQuery(randomScript(), leafFactory, "test", "a*b", false); - assertTrue(query.matches(List.of("astuffb"))); - assertFalse(query.matches(List.of("Astuffb"))); - assertFalse(query.matches(List.of("fffff"))); - assertFalse(query.matches(List.of("a"))); - assertFalse(query.matches(List.of("b"))); - assertFalse(query.matches(List.of("aasdf"))); - assertFalse(query.matches(List.of("dsfb"))); - assertTrue(query.matches(List.of("astuffb", "fffff"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("astuffb"), scratch)); + assertFalse(query.matches(List.of("Astuffb"), scratch)); + assertFalse(query.matches(List.of("fffff"), scratch)); + assertFalse(query.matches(List.of("a"), scratch)); + assertFalse(query.matches(List.of("b"), scratch)); + assertFalse(query.matches(List.of("aasdf"), scratch)); + assertFalse(query.matches(List.of("dsfb"), scratch)); + assertTrue(query.matches(List.of("astuffb", "fffff"), scratch)); StringScriptFieldWildcardQuery ciQuery = new StringScriptFieldWildcardQuery(randomScript(), leafFactory, "test", "a*b", true); - assertTrue(ciQuery.matches(List.of("Astuffb"))); - assertTrue(ciQuery.matches(List.of("astuffB", "fffff"))); + assertTrue(ciQuery.matches(List.of("Astuffb"), scratch)); + assertTrue(ciQuery.matches(List.of("astuffB", "fffff"), scratch)); } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 42fe65c8d14ef..0c6721e1f62e5 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -12,9 +12,11 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitTests; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -28,11 +30,75 @@ import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class CompletionSuggestionOptionTests extends ESTestCase { + private static final ObjectParser, Void> PARSER = new ObjectParser<>( + "CompletionOptionParser", + SearchResponseUtils.unknownMetaFieldConsumer, + HashMap::new + ); + + static { + SearchResponseUtils.declareInnerHitsParseFields(PARSER); + PARSER.declareString( + (map, value) -> map.put(Suggest.Suggestion.Entry.Option.TEXT.getPreferredName(), value), + 
Suggest.Suggestion.Entry.Option.TEXT + ); + PARSER.declareFloat( + (map, value) -> map.put(Suggest.Suggestion.Entry.Option.SCORE.getPreferredName(), value), + Suggest.Suggestion.Entry.Option.SCORE + ); + PARSER.declareObject( + (map, value) -> map.put(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName(), value), + (p, c) -> parseContexts(p), + CompletionSuggestion.Entry.Option.CONTEXTS + ); + } + + private static Map> parseContexts(XContentParser parser) throws IOException { + Map> contexts = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String key = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + Set values = new HashSet<>(); + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); + values.add(parser.text()); + } + contexts.put(key, values); + } + return contexts; + } + + public static Option parseOption(XContentParser parser) { + Map values = PARSER.apply(parser, null); + + Text text = new Text((String) values.get(Suggest.Suggestion.Entry.Option.TEXT.getPreferredName())); + Float score = (Float) values.get(Suggest.Suggestion.Entry.Option.SCORE.getPreferredName()); + @SuppressWarnings("unchecked") + Map> contexts = (Map>) values.get( + CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName() + ); + if (contexts == null) { + contexts = Collections.emptyMap(); + } + + SearchHit hit = null; + // the option either prints SCORE or inlines the search hit + if (score == null) { + hit = SearchResponseUtils.searchHitFromMap(values); + score = hit.getScore(); + } + CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(-1, text, score, contexts); + option.setHit(hit); + return option; + } + public static Option createTestItem() { Text text = new Text(randomAlphaOfLengthBetween(5, 15)); int docId = randomInt(); @@ -91,7 +157,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { } Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsed = Option.fromXContent(parser); + parsed = parseOption(parser); assertNull(parser.nextToken()); } assertEquals(option.getText(), parsed.getText()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index d209f15a641f5..8c850a5be42ac 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -17,8 +17,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -28,6 +30,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,26 +63,76 @@ public class SuggestTests extends ESTestCase { new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("term"), - (parser, context) -> TermSuggestion.fromXContent(parser, (String) context) + (parser, context) -> parseTermSuggestion(parser, (String) context) ) ); namedXContents.add( new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("phrase"), - (parser, context) -> PhraseSuggestion.fromXContent(parser, (String) context) - ) - ); - namedXContents.add( - new NamedXContentRegistry.Entry( - Suggest.Suggestion.class, - new ParseField("completion"), - (parser, context) -> CompletionSuggestion.fromXContent(parser, (String) context) + (parser, context) -> parsePhraseSuggestion(parser, (String) context) ) ); + namedXContents.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField("completion"), (parser, context) -> { + CompletionSuggestion suggestion = new CompletionSuggestion((String) context, -1, false); + parseEntries(parser, suggestion, SuggestionEntryTests::parseCompletionSuggestionEntry); + return suggestion; + })); xContentRegistry = new NamedXContentRegistry(namedXContents); } + public static PhraseSuggestion parsePhraseSuggestion(XContentParser parser, String name) throws IOException { + PhraseSuggestion suggestion = new PhraseSuggestion(name, -1); + parseEntries(parser, suggestion, SuggestionEntryTests::parsePhraseSuggestionEntry); + return suggestion; + } + + private static > void parseEntries( + XContentParser parser, + Suggestion suggestion, + CheckedFunction entryParser + ) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + suggestion.addTerm(entryParser.apply(parser)); + } + } + + static void declareCommonEntryParserFields(ObjectParser, Void> parser) { + parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(Suggestion.Entry.TEXT)); + parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(Suggestion.Entry.OFFSET)); + parser.declareInt((entry, length) -> entry.length = length, new ParseField(Suggestion.Entry.LENGTH)); + } + + public static TermSuggestion parseTermSuggestion(XContentParser parser, String name) throws IOException { + // the "size" parameter and the SortBy for TermSuggestion cannot be parsed from the response, use default values + TermSuggestion suggestion = new TermSuggestion(name, -1, SortBy.SCORE); + parseEntries(parser, suggestion, SuggestTests::parseTermSuggestionEntry); + return suggestion; + } + + private static final ObjectParser PARSER = new ObjectParser<>( + "TermSuggestionEntryParser", + true, + TermSuggestion.Entry::new + ); + static { + declareCommonEntryParserFields(PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
+ * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> TermSuggestionOptionTests.parseEntryOption(p), + new ParseField(Suggest.Suggestion.Entry.OPTIONS) + ); + } + + public static TermSuggestion.Entry parseTermSuggestionEntry(XContentParser parser) { + return PARSER.apply(parser, null); + } + public static List getDefaultNamedXContents() { return namedXContents; } @@ -113,7 +166,7 @@ public void testFromXContent() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureFieldName(parser, parser.nextToken(), Suggest.NAME); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = Suggest.fromXContent(parser); + parsed = SearchResponseUtils.parseSuggest(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); @@ -210,7 +263,7 @@ public void testParsingExceptionOnUnknownSuggestion() throws IOException { BytesReference originalBytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - ParsingException ex = expectThrows(ParsingException.class, () -> Suggest.fromXContent(parser)); + ParsingException ex = expectThrows(ParsingException.class, () -> SearchResponseUtils.parseSuggest(parser)); assertEquals("Could not parse suggestion keyed as [unknownSuggestion]", ex.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java index fc1589066a9fc..d437f5fdc5fe6 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -39,9 +41,53 @@ public class SuggestionEntryTests extends ESTestCase { private static final Map>, Function>> ENTRY_PARSERS = new HashMap<>(); static { - ENTRY_PARSERS.put(TermSuggestion.Entry.class, TermSuggestion.Entry::fromXContent); - ENTRY_PARSERS.put(PhraseSuggestion.Entry.class, PhraseSuggestion.Entry::fromXContent); - ENTRY_PARSERS.put(CompletionSuggestion.Entry.class, CompletionSuggestion.Entry::fromXContent); + ENTRY_PARSERS.put(TermSuggestion.Entry.class, SuggestTests::parseTermSuggestionEntry); + ENTRY_PARSERS.put(PhraseSuggestion.Entry.class, SuggestionEntryTests::parsePhraseSuggestionEntry); + ENTRY_PARSERS.put(CompletionSuggestion.Entry.class, SuggestionEntryTests::parseCompletionSuggestionEntry); + } + + private static final ObjectParser PHRASE_SUGGESTION_ENTRY_PARSER = new ObjectParser<>( + "PhraseSuggestionEntryParser", + true, + PhraseSuggestion.Entry::new + ); + static { + SuggestTests.declareCommonEntryParserFields(PHRASE_SUGGESTION_ENTRY_PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a 
workaround for a JDK 14 compiler bug. + * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + PHRASE_SUGGESTION_ENTRY_PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> SuggestionOptionTests.parsePhraseSuggestionOption(p), + new ParseField(Entry.OPTIONS) + ); + } + + public static PhraseSuggestion.Entry parsePhraseSuggestionEntry(XContentParser parser) { + return PHRASE_SUGGESTION_ENTRY_PARSER.apply(parser, null); + } + + private static final ObjectParser COMPLETION_SUGGESTION_ENTRY_PARSER = new ObjectParser<>( + "CompletionSuggestionEntryParser", + true, + CompletionSuggestion.Entry::new + ); + static { + SuggestTests.declareCommonEntryParserFields(COMPLETION_SUGGESTION_ENTRY_PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. + * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + COMPLETION_SUGGESTION_ENTRY_PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> CompletionSuggestionOptionTests.parseOption(p), + new ParseField(Entry.OPTIONS) + ); + } + + public static CompletionSuggestion.Entry parseCompletionSuggestionEntry(XContentParser parser) { + return COMPLETION_SUGGESTION_ENTRY_PARSER.apply(parser, null); } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java index 0261ab623ee8f..d818f05e75e12 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -21,11 +22,38 @@ import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.COLLATE_MATCH; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.HIGHLIGHTED; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.SCORE; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.TEXT; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SuggestionOptionTests extends ESTestCase { + private static final ConstructingObjectParser PHRASE_OPTION_PARSER = + new ConstructingObjectParser<>("PhraseOptionParser", true, args -> { + Text text = new Text((String) args[0]); + float score = (Float) args[1]; + String highlighted = (String) args[2]; + Text highlightedText = highlighted == null ? 
null : new Text(highlighted); + Boolean collateMatch = (Boolean) args[3]; + return new PhraseSuggestion.Entry.Option(text, highlightedText, score, collateMatch); + }); + + static { + PHRASE_OPTION_PARSER.declareString(constructorArg(), TEXT); + PHRASE_OPTION_PARSER.declareFloat(constructorArg(), SCORE); + PHRASE_OPTION_PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); + PHRASE_OPTION_PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); + } + + public static PhraseSuggestion.Entry.Option parsePhraseSuggestionOption(XContentParser parser) { + return PHRASE_OPTION_PARSER.apply(parser, null); + } + public static Option createTestItem() { Text text = new Text(randomAlphaOfLengthBetween(5, 15)); float score = randomFloat(); @@ -56,7 +84,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = PhraseSuggestion.Entry.Option.fromXContent(parser); + parsed = parsePhraseSuggestionOption(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java index 0e92a0ee37682..f9df18fd0944a 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -130,7 +131,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - parsed = Suggestion.fromXContent(parser); + parsed = SearchResponseUtils.parseSuggestion(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); } @@ -153,7 +154,7 @@ public void testFromXContentWithoutTypeParam() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - assertNull(Suggestion.fromXContent(parser)); + assertNull(SearchResponseUtils.parseSuggestion(parser)); ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); } } @@ -187,7 +188,10 @@ public void testUnknownSuggestionTypeThrows() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, () -> 
Suggestion.fromXContent(parser)); + NamedObjectNotFoundException e = expectThrows( + NamedObjectNotFoundException.class, + () -> SearchResponseUtils.parseSuggestion(parser) + ); assertEquals("[1:31] unknown field [unknownType]", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java index aa5e9fcc3edbf..f13267e60b36f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; +import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion.Entry.Option; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -22,6 +24,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class TermSuggestionOptionTests extends ESTestCase { @@ -54,7 +57,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = Option.fromXContent(parser); + parsed = parseEntryOption(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -71,4 +74,24 @@ public void testToXContent() throws IOException { {"text":"someText","score":1.3,"freq":100}""", xContent.utf8ToString()); } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "TermSuggestionOptionParser", + true, + args -> { + Text text = new Text((String) args[0]); + int freq = (Integer) args[1]; + float score = (Float) args[2]; + return new Option(text, freq, score); + } + ); + + static { + PARSER.declareString(constructorArg(), Suggest.Suggestion.Entry.Option.TEXT); + PARSER.declareInt(constructorArg(), TermSuggestion.Entry.Option.FREQ); + PARSER.declareFloat(constructorArg(), Suggest.Suggestion.Entry.Option.SCORE); + } + + public static Option parseEntryOption(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java index f47aaee5ff145..169379441aadd 100644 --- a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java @@ -12,11 +12,15 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.cluster.node.DiscoveryNodes; import 
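Editorial aside: the suggestion option tests in this change replace the removed fromXContent methods with test-local ObjectParser / ConstructingObjectParser instances. A minimal sketch of that pattern, assuming the org.elasticsearch.xcontent APIs used elsewhere in this diff; the ParsedOption record is an illustrative stand-in for TermSuggestion.Entry.Option, and the JSON payload is borrowed from the testToXContent expectation above.

import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

public class OptionParserSketch {
    // Illustrative stand-in for the parsed option: just the text and score fields.
    record ParsedOption(String text, float score) {}

    private static final ConstructingObjectParser<ParsedOption, Void> PARSER = new ConstructingObjectParser<>(
        "option_sketch",
        true, // lenient: unknown fields such as "freq" are ignored, as in the test parsers
        args -> new ParsedOption((String) args[0], (Float) args[1])
    );

    static {
        // Constructor arguments are filled in declaration order: args[0] = text, args[1] = score.
        PARSER.declareString(constructorArg(), new ParseField("text"));
        PARSER.declareFloat(constructorArg(), new ParseField("score"));
    }

    public static void main(String[] args) throws IOException {
        String json = "{\"text\":\"someText\",\"score\":1.3,\"freq\":100}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) {
            System.out.println(PARSER.apply(parser, null)); // ParsedOption[text=someText, score=1.3]
        }
    }
}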
org.elasticsearch.common.Strings; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -31,12 +35,46 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class ListTasksResponseTests extends AbstractXContentTestCase { + private static ConstructingObjectParser setupParser( + String name, + TriFunction, List, List, T> ctor + ) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(name, true, constructingObjects -> { + int i = 0; + @SuppressWarnings("unchecked") + List tasks = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List tasksFailures = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List nodeFailures = (List) constructingObjects[i]; + return ctor.apply(tasks, tasksFailures, nodeFailures); + }); + parser.declareObjectArray(optionalConstructorArg(), TaskInfo.PARSER, new ParseField(ListTasksResponse.TASKS)); + parser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> TaskOperationFailure.fromXContent(p), + new ParseField(BaseTasksResponse.TASK_FAILURES) + ); + parser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField(BaseTasksResponse.NODE_FAILURES) + ); + return parser; + } + + private static final ConstructingObjectParser PARSER = setupParser( + "list_tasks_response", + ListTasksResponse::new + ); + // ListTasksResponse doesn't directly implement ToXContent because it has multiple XContent representations, so we must wrap here public record ListTasksResponseWrapper(ListTasksResponse in) implements ToXContentObject { @Override @@ -108,7 +146,7 @@ private static List randomTasks() { @Override protected ListTasksResponseWrapper doParseInstance(XContentParser parser) { - return new ListTasksResponseWrapper(ListTasksResponse.fromXContent(parser)); + return new ListTasksResponseWrapper(PARSER.apply(parser, null)); } @Override diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index baa03ddf1abcd..66d3dd7a829eb 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -11,10 +11,14 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.TaskExecutionTimeTrackingEsThreadPoolExecutor; import org.elasticsearch.core.TimeValue; @@ -25,7 +29,10 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedTransferQueue; import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DEFAULT; +import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DO_NOT_TRACK; import static org.elasticsearch.threadpool.ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING; import static org.elasticsearch.threadpool.ThreadPool.LATE_TIME_INTERVAL_WARN_THRESHOLD_SETTING; import static org.elasticsearch.threadpool.ThreadPool.assertCurrentMethodIsNotCalledRecursively; @@ -405,4 +412,148 @@ public void testSearchWorkedThreadPool() { assertTrue(terminate(threadPool)); } } + + public void testScheduledOneShotRejection() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + threadPool.schedule( + ActionRunnable.run(future, () -> fail("should not execute")), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future, 10, TimeUnit.SECONDS)); + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledOneShotForceExecution() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + threadPool.schedule( + forceExecution(ActionRunnable.run(future, () -> {})), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + Thread.yield(); + assertFalse(future.isDone()); + + latch.countDown(); + FutureUtils.get(future, 10, TimeUnit.SECONDS); // shouldn't throw + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledFixedDelayRejection() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + threadPool.scheduleWithFixedDelay( + ActionRunnable.wrap(future, ignored -> fail("should not execute")), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future, 10, TimeUnit.SECONDS)); + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledFixedDelayForceExecution() { + final var name = "fixed-bounded"; + final var threadPool = new 
TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + + threadPool.scheduleWithFixedDelay( + forceExecution(ActionRunnable.run(future, Thread::yield)), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + assertFalse(future.isDone()); + + latch.countDown(); + FutureUtils.get(future, 10, TimeUnit.SECONDS); // shouldn't throw + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + private static AbstractRunnable forceExecution(AbstractRunnable delegate) { + return new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + delegate.onFailure(e); + } + + @Override + protected void doRun() { + delegate.run(); + } + + @Override + public void onRejection(Exception e) { + delegate.onRejection(e); + } + + @Override + public void onAfter() { + delegate.onAfter(); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + } + + private static void blockExecution(ExecutorService executor, CountDownLatch latch) { + while (true) { + try { + executor.execute(() -> safeAwait(latch)); + } catch (EsRejectedExecutionException e) { + break; + } + } + } + } diff --git a/settings.gradle b/settings.gradle index 97cce0a476d99..48e3794c9005d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -90,8 +90,7 @@ List projects = [ 'test:framework', 'test:fixtures:azure-fixture', 'test:fixtures:gcs-fixture', - 'test:fixtures:hdfs2-fixture', - 'test:fixtures:hdfs3-fixture', + 'test:fixtures:hdfs-fixture', 'test:fixtures:krb5kdc-fixture', 'test:fixtures:minio-fixture', 'test:fixtures:old-elasticsearch', diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java new file mode 100644 index 0000000000000..fbc191a12d8b0 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
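Editorial aside: the new ThreadPoolTests cases above depend on a blockExecution helper that saturates a bounded executor so that anything scheduled afterwards is rejected (while force-execution runnables still run once the latch opens). A self-contained, JDK-only sketch of that saturate-then-reject pattern; the pool and queue sizes are arbitrary, and plain RejectedExecutionException stands in for EsRejectedExecutionException.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BlockedExecutorSketch {
    public static void main(String[] args) throws InterruptedException {
        // Bounded pool: one worker, queue capacity of one, abort on overflow.
        ThreadPoolExecutor executor = new ThreadPoolExecutor(
            1, 1, 0L, TimeUnit.MILLISECONDS,
            new ArrayBlockingQueue<>(1),
            new ThreadPoolExecutor.AbortPolicy()
        );
        CountDownLatch latch = new CountDownLatch(1);
        try {
            int accepted = 0;
            while (true) {
                try {
                    // Each task parks on the latch, so the pool and its queue fill up quickly.
                    executor.execute(() -> {
                        try {
                            latch.await();
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    });
                    accepted++;
                } catch (RejectedExecutionException e) {
                    System.out.println("saturated after " + accepted + " tasks; further work is rejected");
                    break;
                }
            }
        } finally {
            latch.countDown(); // unblock the parked worker so the pool can drain
            executor.shutdown();
            executor.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
}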
+ */ + +package org.elasticsearch.xpack.esql.heap_attack; + +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; + +public class Clusters { + static ElasticsearchCluster buildCluster() { + var spec = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + .module("test-esql-heap-attack") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial"); + String javaVersion = JvmInfo.jvmInfo().version(); + if (javaVersion.equals("20") || javaVersion.equals("21")) { + // see https://github.com/elastic/elasticsearch/issues/99592 + spec.jvmArg("-XX:+UnlockDiagnosticVMOptions -XX:+G1UsePreventiveGC"); + } + return spec.build(); + } +} diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 8c87ef5977114..2f3826f8423b8 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -21,10 +21,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.TestThreadPool; @@ -60,25 +58,10 @@ public class HeapAttackIT extends ESRestTestCase { @ClassRule - public static ElasticsearchCluster cluster = buildCluster(); + public static ElasticsearchCluster cluster = Clusters.buildCluster(); static volatile boolean SUITE_ABORTED = false; - static ElasticsearchCluster buildCluster() { - var spec = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .nodes(2) - .module("test-esql-heap-attack") - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial"); - String javaVersion = JvmInfo.jvmInfo().version(); - if (javaVersion.equals("20") || javaVersion.equals("21")) { - // see https://github.com/elastic/elasticsearch/issues/99592 - spec.jvmArg("-XX:+UnlockDiagnosticVMOptions -XX:+G1UsePreventiveGC"); - } - return spec.build(); - } - @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java index d0a146edde765..f23646df081c1 100644 --- a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java @@ -20,12 +20,15 @@ import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import java.util.ArrayList; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +@ServerlessScope(Scope.PUBLIC) public class RestTriggerOutOfMemoryAction extends BaseRestHandler { private static final Logger LOGGER = LogManager.getLogger(RestTriggerOutOfMemoryAction.class); diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle new file mode 100644 index 0000000000000..bd4acf4e51505 --- /dev/null +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +apply plugin: 'elasticsearch.java' +apply plugin: 'com.github.johnrengelman.shadow' + +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar + +configurations { +// all { +// transitive = true +// } + hdfs2 + hdfs3 + consumable("shadowedHdfs2") +} + +dependencies { + compileOnly("org.apache.hadoop:hadoop-minicluster:2.8.5") + api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}") { + transitive false + } + compileOnly "junit:junit:${versions.junit}" + hdfs2 "org.apache.hadoop:hadoop-minicluster:2.8.5" + hdfs3 "org.apache.hadoop:hadoop-minicluster:3.3.1" + +} + +tasks.named("shadowJar").configure { + archiveClassifier.set("hdfs3") + // fix issues with signed jars + + relocate("org.apache.hadoop", "fixture.hdfs3.org.apache.hadoop") { + exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" + exclude "org.apache.hadoop.ipc.StandbyException" + } + configurations << project.configurations.hdfs3 +} + +def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) { + relocate("org.apache.hadoop", "fixture.hdfs2.org.apache.hadoop") { + exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" + exclude "org.apache.hadoop.ipc.StandbyException" + } + archiveClassifier.set("hdfs2") + from sourceSets.main.output + configurations << project.configurations.hdfs2 +} + +tasks.withType(ShadowJar) { + dependencies { +// exclude(dependency('commons-io:commons-io:2.8.0')) + exclude(dependency("com.carrotsearch.randomizedtesting:randomizedtesting-runner:.*")) + exclude(dependency("junit:junit:.*")) + exclude(dependency("org.slf4j:slf4j-api:.*")) + exclude(dependency("com.google.guava:guava:.*")) + exclude(dependency("org.apache.commons:commons-compress:.*")) + exclude(dependency("commons-logging:commons-logging:.*")) + exclude(dependency("commons-codec:commons-codec:.*")) + exclude(dependency("org.apache.httpcomponents:httpclient:.*")) + exclude(dependency("org.apache.httpcomponents:httpcore:.*")) + exclude(dependency("org.apache.logging.log4j:log4j-1.2-api:.*")) + exclude(dependency("log4j:log4j:.*")) + exclude(dependency("io.netty:.*:.*")) + exclude(dependency("com.nimbusds:nimbus-jose-jwt:.*")) + exclude(dependency("commons-cli:commons-cli:1.2")) + exclude(dependency("net.java.dev.jna:jna:.*")) + exclude(dependency("org.objenesis:objenesis:.*")) + exclude(dependency('com.fasterxml.jackson.core:.*:.*')) + } + + transform(org.elasticsearch.gradle.internal.shadow.XmlClassRelocationTransformer.class) { + resource = "core-default.xml" + enabled = true + } + + 
transform(org.elasticsearch.gradle.internal.shadow.XmlClassRelocationTransformer.class) { + resource = "hdfs-default.xml" + enabled = true + } +} + +artifacts { + shadowedHdfs2(hdfs2Jar) +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java similarity index 77% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java rename to test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java index c79418557da20..be63e22742ed5 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java +++ b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -package org.elasticsearch.repositories.hdfs; +package org.elasticsearch.test.fixtures.hdfs; import com.carrotsearch.randomizedtesting.ThreadFilter; @@ -29,6 +29,11 @@ public final class HdfsClientThreadLeakFilter implements ThreadFilter { @Override public boolean reject(Thread t) { - return t.getName().equals(OFFENDING_THREAD_NAME); + return t.getName().contains(OFFENDING_THREAD_NAME) + || t.getName().startsWith("LeaseRenewer") + || t.getName().startsWith("SSL Certificates Store Monitor") // hadoop 3 brings that in + || t.getName().startsWith("GcTimeMonitor") // hadoop 3 + || t.getName().startsWith("Command processor") // hadoop 3 + || t.getName().startsWith("ForkJoinPool-"); // hadoop 3 } } diff --git a/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java new file mode 100644 index 0000000000000..37d9fc9f536ca --- /dev/null +++ b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java @@ -0,0 +1,460 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.fixtures.hdfs; + +import org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclEntryType; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.ha.BadFencingConfigurationException; +import org.apache.hadoop.ha.HAServiceProtocol; +import org.apache.hadoop.ha.HAServiceTarget; +import org.apache.hadoop.ha.NodeFencer; +import org.apache.hadoop.ha.ZKFCProtocol; +import org.apache.hadoop.ha.protocolPB.HAServiceProtocolClientSideTranslatorPB; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.hdfs.MiniDFSNNTopology; +import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; +import org.apache.hadoop.hdfs.tools.DFSHAAdmin; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.junit.Assume; +import org.junit.rules.ExternalResource; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public class HdfsFixture extends ExternalResource { + + private static final Logger LOGGER = LoggerFactory.getLogger(HdfsFixture.class); + + private TemporaryFolder temporaryFolder = new TemporaryFolder(); + private MiniDFSCluster dfs; + private String haNameService; + private Supplier principalConfig = null; + private Supplier keytab = null; + private Configuration cfg; + + private Configuration haConfiguration; + private int explicitPort = findAvailablePort(); + + public HdfsFixture withHAService(String haNameService) { + this.haNameService = haNameService; + return this; + } + + public HdfsFixture withKerberos(Supplier principalConfig, Supplier keytabFile) { + this.principalConfig = principalConfig; + this.keytab = keytabFile; + return this; + } + + @Override + protected void before() throws Throwable { + temporaryFolder.create(); + assumeHdfsAvailable(); + startMinHdfs(); + } + + private void assumeHdfsAvailable() { + boolean fixtureSupported = false; + if (isWindows()) { + // hdfs fixture will not start without hadoop native libraries on windows + String nativePath = System.getenv("HADOOP_HOME"); + if (nativePath != null) { + java.nio.file.Path path = Paths.get(nativePath); + if (Files.isDirectory(path) + && Files.exists(path.resolve("bin").resolve("winutils.exe")) + && Files.exists(path.resolve("bin").resolve("hadoop.dll")) + && Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { + fixtureSupported = true; + } else { + throw new IllegalStateException( + "HADOOP_HOME: " + path + " is invalid, does not contain hadoop native libraries in " + nativePath + "/bin" + ); + } + } + } else { + fixtureSupported = true; + } + + boolean nonLegalegalPath = 
temporaryFolder.getRoot().getAbsolutePath().contains(" "); + if (nonLegalegalPath) { + fixtureSupported = false; + } + + Assume.assumeTrue("HDFS Fixture is not supported", fixtureSupported); + } + + private boolean isWindows() { + return System.getProperty("os.name").toLowerCase().startsWith("windows"); + } + + /** + * Performs a two-phase leading namenode transition. + * @param from Namenode ID to transition to standby + * @param to Namenode ID to transition to active + * @throws IOException In the event of a raised exception during namenode failover. + */ + public void failoverHDFS(String from, String to) throws IOException { + assert isHA() && haConfiguration != null : "HA Configuration must be set up before performing failover"; + LOGGER.info("Swapping active namenodes: [{}] to standby and [{}] to active", from, to); + try { + AccessController.doPrivileged((PrivilegedExceptionAction) () -> { + CloseableHAAdmin haAdmin = new CloseableHAAdmin(); + haAdmin.setConf(haConfiguration); + try { + haAdmin.transitionToStandby(from); + haAdmin.transitionToActive(to); + } finally { + haAdmin.close(); + } + return null; + }); + } catch (PrivilegedActionException pae) { + throw new IOException("Unable to perform namenode failover", pae); + } + } + + public void setupHA() throws IOException { + assert isHA() : "HA Name Service must be set up before setting up HA"; + haConfiguration = new Configuration(); + haConfiguration.set("dfs.nameservices", haNameService); + haConfiguration.set("dfs.ha.namenodes.ha-hdfs", "nn1,nn2"); + haConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn1", "localhost:" + getPort(0)); + haConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn2", "localhost:" + (getPort(1))); + haConfiguration.set( + "dfs.client.failover.proxy.provider.ha-hdfs", + "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" + ); + + if (isSecure()) { + // ensure that keytab exists + Path kt = this.keytab.get(); + if (Files.exists(kt) == false) { + throw new IllegalStateException("Could not locate keytab at " + keytab.get()); + } + if (Files.isReadable(kt) != true) { + throw new IllegalStateException("Could not read keytab at " + keytab.get()); + } + LOGGER.info("Keytab Length: " + Files.readAllBytes(kt).length); + + // set principal names + String hdfsKerberosPrincipal = principalConfig.get(); + haConfiguration.set("dfs.namenode.kerberos.principal", hdfsKerberosPrincipal); + haConfiguration.set("dfs.datanode.kerberos.principal", hdfsKerberosPrincipal); + haConfiguration.set("dfs.data.transfer.protection", "authentication"); + + SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, haConfiguration); + UserGroupInformation.setConfiguration(haConfiguration); + UserGroupInformation.loginUserFromKeytab(hdfsKerberosPrincipal, keytab.get().toString()); + } else { + SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE, haConfiguration); + UserGroupInformation.setConfiguration(haConfiguration); + UserGroupInformation.getCurrentUser(); + } + } + + private void startMinHdfs() throws Exception { + Path baseDir = temporaryFolder.newFolder("baseDir").toPath(); + int maxAttempts = 3; + for (int attempt = 1; attempt <= maxAttempts; attempt++) { + try { + Path hdfsHome = createHdfsDataFolder(baseDir); + tryStartingHdfs(hdfsHome); + break; + } catch (IOException e) { + // Log the exception + System.out.println("Attempt " + attempt + " failed with error: " + e.getMessage()); + // If the maximum number of attempts is reached, 
rethrow the exception + FileUtils.deleteDirectory(baseDir.toFile()); + + if (attempt == maxAttempts) { + throw e; + } + } + } + } + + private static Path createHdfsDataFolder(Path baseDir) throws IOException { + if (System.getenv("HADOOP_HOME") == null) { + Path hadoopHome = baseDir.resolve("hadoop-home"); + Files.createDirectories(hadoopHome); + System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + } + // hdfs-data/, where any data is going + Path hdfsData = baseDir.resolve("hdfs-data"); + Files.createDirectories(hdfsData); + return hdfsData; + } + + private void tryStartingHdfs(Path hdfsHome) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, + InvocationTargetException, IOException, URISyntaxException { + // configure cluster + cfg = new Configuration(); + cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); + cfg.set("hadoop.security.group.mapping", "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback"); + + // optionally configure security + if (isSecure()) { + String kerberosPrincipal = principalConfig.get(); + String keytabFilePath = keytab.get().toString(); + cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); + cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); + cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFilePath); + cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFilePath); + cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); + cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); + cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); + cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, "true"); + } + refreshKrb5Config(); + UserGroupInformation.setConfiguration(cfg); + + MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); + builder.nameNodePort(explicitPort); + if (isHA()) { + MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); + MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); + MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); + MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); + builder.nnTopology(namenodeTopology); + } + dfs = builder.build(); + // Configure contents of the filesystem + org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); + FileSystem fs; + if (isHA()) { + dfs.transitionToActive(0); + fs = HATestUtil.configureFailoverFs(dfs, cfg); + } else { + fs = dfs.getFileSystem(0); + } + + try { + // Set the elasticsearch user directory up + fs.mkdirs(esUserPath); + if (UserGroupInformation.isSecurityEnabled()) { + List acls = new ArrayList<>(); + acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); + fs.modifyAclEntries(esUserPath, acls); + } + + // Install a pre-existing repository into HDFS + String directoryName = "readonly-repository"; + String archiveName = directoryName + ".tar.gz"; + URL readOnlyRepositoryArchiveURL = getClass().getClassLoader().getResource(archiveName); + if (readOnlyRepositoryArchiveURL != null) { + 
Path tempDirectory = Files.createTempDirectory(getClass().getName()); + File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); + FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); + FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); + + fs.copyFromLocalFile( + true, + true, + new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), + esUserPath.suffix("/existing/" + directoryName) + ); + + FileUtils.deleteDirectory(tempDirectory.toFile()); + } + } finally { + fs.close(); + } + } + + private boolean isSecure() { + return keytab != null && principalConfig != null; + } + + @Override + protected void after() { + if (dfs != null) { + try { + if (isHA()) { + dfs.getFileSystem(0).close(); + dfs.getFileSystem(1).close(); + } else { + dfs.getFileSystem().close(); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + dfs.close(); + } + temporaryFolder.delete(); + } + + private boolean isHA() { + return haNameService != null; + } + + public int getPort() { + return dfs == null ? explicitPort : dfs.getNameNodePort(0); + } + + // fix port handling to allow parallel hdfs fixture runs + public int getPort(int i) { + return dfs.getNameNodePort(i); + } + + /** + * Wraps an HAServiceTarget, keeping track of any HAServiceProtocol proxies it generates in order + * to close them at the end of the test lifecycle. + */ + protected static class CloseableHAServiceTarget extends HAServiceTarget { + private final HAServiceTarget delegate; + private final List protocolsToClose = new ArrayList<>(); + + CloseableHAServiceTarget(HAServiceTarget delegate) { + this.delegate = delegate; + } + + @Override + public InetSocketAddress getAddress() { + return delegate.getAddress(); + } + + @Override + public InetSocketAddress getHealthMonitorAddress() { + return delegate.getHealthMonitorAddress(); + } + + @Override + public InetSocketAddress getZKFCAddress() { + return delegate.getZKFCAddress(); + } + + @Override + public NodeFencer getFencer() { + return delegate.getFencer(); + } + + @Override + public void checkFencingConfigured() throws BadFencingConfigurationException { + delegate.checkFencingConfigured(); + } + + @Override + public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) throws IOException { + HAServiceProtocol proxy = delegate.getProxy(conf, timeoutMs); + protocolsToClose.add(proxy); + return proxy; + } + + @Override + public HAServiceProtocol getHealthMonitorProxy(Configuration conf, int timeoutMs) throws IOException { + return delegate.getHealthMonitorProxy(conf, timeoutMs); + } + + @Override + public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs) throws IOException { + return delegate.getZKFCProxy(conf, timeoutMs); + } + + @Override + public boolean isAutoFailoverEnabled() { + return delegate.isAutoFailoverEnabled(); + } + + private void close() { + for (HAServiceProtocol protocol : protocolsToClose) { + if (protocol instanceof HAServiceProtocolClientSideTranslatorPB haServiceProtocolClientSideTranslatorPB) { + haServiceProtocolClientSideTranslatorPB.close(); + } + } + } + } + + /** + * The default HAAdmin tool does not throw exceptions on failures, and does not close any client connection + * resources when it concludes. This subclass overrides the tool to allow for exception throwing, and to + * keep track of and clean up connection resources. 
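
For orientation, here is a minimal sketch of how a repository test could consume the new fixture's HA surface. The test class name, repository URI, and the snapshot/restore steps are illustrative assumptions; only the builder and failover methods shown in this file (`withHAService`, `setupHA`, `failoverHDFS`) come from the change itself.

```java
import org.elasticsearch.test.fixtures.hdfs.HdfsFixture;
import org.junit.ClassRule;
import org.junit.Test;

// Hypothetical test class, shown only to illustrate the fixture's lifecycle.
public class HdfsHaFailoverIT {

    // Two-namenode MiniDFSCluster under the "ha-hdfs" nameservice, the id that setupHA() is wired for.
    @ClassRule
    public static final HdfsFixture hdfsFixture = new HdfsFixture().withHAService("ha-hdfs");

    @Test
    public void snapshotSurvivesNamenodeFailover() throws Exception {
        hdfsFixture.setupHA();                   // build the client-side failover Configuration
        String repositoryUri = "hdfs://ha-hdfs/user/elasticsearch/existing";
        // ... register an HDFS repository at repositoryUri and snapshot against the active namenode (nn1) ...
        hdfsFixture.failoverHDFS("nn1", "nn2");  // nn1 -> standby, nn2 -> active
        // ... restore from the same repository to check the client follows the failover proxy ...
    }
}
```

Once the cluster is running, the per-namenode RPC ports are also available through `getPort(0)` and `getPort(1)`, which is what `setupHA()` uses to populate the client configuration.
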
+ */ + protected static class CloseableHAAdmin extends DFSHAAdmin { + private final List serviceTargets = new ArrayList<>(); + + @Override + protected HAServiceTarget resolveTarget(String nnId) { + CloseableHAServiceTarget target = new CloseableHAServiceTarget(super.resolveTarget(nnId)); + serviceTargets.add(target); + return target; + } + + @Override + public int run(String[] argv) throws Exception { + return runCmd(argv); + } + + public int transitionToStandby(String namenodeID) throws Exception { + return run(new String[] { "-transitionToStandby", namenodeID }); + } + + public int transitionToActive(String namenodeID) throws Exception { + return run(new String[] { "-transitionToActive", namenodeID }); + } + + public void close() { + for (CloseableHAServiceTarget serviceTarget : serviceTargets) { + serviceTarget.close(); + } + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static void refreshKrb5Config() throws ClassNotFoundException, NoSuchMethodException, IllegalArgumentException, + IllegalAccessException, InvocationTargetException, InvocationTargetException { + Class classRef; + if (System.getProperty("java.vendor").contains("IBM")) { + classRef = Class.forName("com.ibm.security.krb5.internal.Config"); + } else { + classRef = Class.forName("sun.security.krb5.Config"); + } + + Method refreshMethod = classRef.getMethod("refresh"); + refreshMethod.invoke(classRef); + } + + private static int findAvailablePort() { + try (ServerSocket socket = new ServerSocket(0)) { + return socket.getLocalPort(); + } catch (Exception ex) { + LOGGER.error("Failed to find available port", ex); + } + return -1; + } + +} diff --git a/test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz similarity index 100% rename from test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz rename to test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz diff --git a/test/fixtures/hdfs2-fixture/build.gradle b/test/fixtures/hdfs2-fixture/build.gradle deleted file mode 100644 index 43d14a38c5e3e..0000000000000 --- a/test/fixtures/hdfs2-fixture/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -apply plugin: 'elasticsearch.java' - -dependencies { - api "org.apache.hadoop:hadoop-minicluster:2.8.5" -} diff --git a/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java deleted file mode 100644 index ee993fec74eb4..0000000000000 --- a/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package hdfs; - -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.permission.AclEntry; -import org.apache.hadoop.fs.permission.AclEntryType; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.MiniDFSNNTopology; -import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.File; -import java.lang.management.ManagementFactory; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * MiniHDFS test fixture. There is a CLI tool, but here we can - * easily properly setup logging, avoid parsing JSON, etc. - */ -public class MiniHDFS { - - private static String PORT_FILE_NAME = "ports"; - private static String PID_FILE_NAME = "pid"; - - public static void main(String[] args) throws Exception { - if (args.length != 1 && args.length != 3) { - throw new IllegalArgumentException( - "Expected: MiniHDFS [ ], got: " + Arrays.toString(args) - ); - } - boolean secure = args.length == 3; - - // configure Paths - Path baseDir = Paths.get(args[0]); - // hadoop-home/, so logs will not complain - if (System.getenv("HADOOP_HOME") == null) { - Path hadoopHome = baseDir.resolve("hadoop-home"); - Files.createDirectories(hadoopHome); - System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); - } - // hdfs-data/, where any data is going - Path hdfsHome = baseDir.resolve("hdfs-data"); - - // configure cluster - Configuration cfg = new Configuration(); - cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); - // lower default permission: TODO: needed? 
- cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); - - // optionally configure security - if (secure) { - String kerberosPrincipal = args[1]; - String keytabFile = args[2]; - - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); - cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, "true"); - } - - UserGroupInformation.setConfiguration(cfg); - - MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); - String explicitPort = System.getProperty("hdfs.config.port"); - if (explicitPort != null) { - builder.nameNodePort(Integer.parseInt(explicitPort)); - } else { - if (secure) { - builder.nameNodePort(9998); - } else { - builder.nameNodePort(9999); - } - } - - // Configure HA mode - String haNameService = System.getProperty("ha-nameservice"); - boolean haEnabled = haNameService != null; - if (haEnabled) { - MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); - MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); - MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); - MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); - builder.nnTopology(namenodeTopology); - } - - MiniDFSCluster dfs = builder.build(); - - // Configure contents of the filesystem - org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); - - FileSystem fs; - if (haEnabled) { - dfs.transitionToActive(0); - fs = HATestUtil.configureFailoverFs(dfs, cfg); - } else { - fs = dfs.getFileSystem(); - } - - try { - // Set the elasticsearch user directory up - fs.mkdirs(esUserPath); - if (UserGroupInformation.isSecurityEnabled()) { - List acls = new ArrayList<>(); - acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); - fs.modifyAclEntries(esUserPath, acls); - } - - // Install a pre-existing repository into HDFS - String directoryName = "readonly-repository"; - String archiveName = directoryName + ".tar.gz"; - URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName); - if (readOnlyRepositoryArchiveURL != null) { - Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName()); - File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); - FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); - FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); - - fs.copyFromLocalFile( - true, - true, - new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), - esUserPath.suffix("/existing/" + directoryName) - ); - - FileUtils.deleteDirectory(tempDirectory.toFile()); - } - } finally { - fs.close(); - } - - // write our PID file - Path tmp = 
Files.createTempFile(baseDir, null, null); - String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; - Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - - // write our port file - String portFileContent = Integer.toString(dfs.getNameNodePort(0)); - if (haEnabled) { - portFileContent = portFileContent + "\n" + Integer.toString(dfs.getNameNodePort(1)); - } - tmp = Files.createTempFile(baseDir, null, null); - Files.write(tmp, portFileContent.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - } - -} diff --git a/test/fixtures/hdfs3-fixture/build.gradle b/test/fixtures/hdfs3-fixture/build.gradle deleted file mode 100644 index 872ab2efd42ab..0000000000000 --- a/test/fixtures/hdfs3-fixture/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -apply plugin: 'elasticsearch.java' - -dependencies { - api "org.apache.hadoop:hadoop-minicluster:3.3.1" -} diff --git a/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java deleted file mode 100644 index 0a26f5d82ac17..0000000000000 --- a/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package hdfs; - -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.permission.AclEntry; -import org.apache.hadoop.fs.permission.AclEntryType; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.MiniDFSNNTopology; -import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.File; -import java.lang.management.ManagementFactory; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * MiniHDFS test fixture. There is a CLI tool, but here we can - * easily properly setup logging, avoid parsing JSON, etc. 
- */ -public class MiniHDFS { - - private static String PORT_FILE_NAME = "ports"; - private static String PID_FILE_NAME = "pid"; - - public static void main(String[] args) throws Exception { - if (args.length != 1 && args.length != 3) { - throw new IllegalArgumentException( - "Expected: MiniHDFS [ ], got: " + Arrays.toString(args) - ); - } - boolean secure = args.length == 3; - - // configure Paths - Path baseDir = Paths.get(args[0]); - // hadoop-home/, so logs will not complain - if (System.getenv("HADOOP_HOME") == null) { - Path hadoopHome = baseDir.resolve("hadoop-home"); - Files.createDirectories(hadoopHome); - System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); - } - // hdfs-data/, where any data is going - Path hdfsHome = baseDir.resolve("hdfs-data"); - - // configure cluster - Configuration cfg = new Configuration(); - cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); - // lower default permission: TODO: needed? - cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); - - // optionally configure security - if (secure) { - String kerberosPrincipal = args[1]; - String keytabFile = args[2]; - - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); - cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY, "AES/CTR/NoPadding"); - } - - UserGroupInformation.setConfiguration(cfg); - - MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); - String explicitPort = System.getProperty("hdfs.config.port"); - if (explicitPort != null) { - builder.nameNodePort(Integer.parseInt(explicitPort)); - } else { - if (secure) { - builder.nameNodePort(9998); - } else { - builder.nameNodePort(9999); - } - } - - // Configure HA mode - String haNameService = System.getProperty("ha-nameservice"); - boolean haEnabled = haNameService != null; - if (haEnabled) { - MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); - MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); - MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); - MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); - builder.nnTopology(namenodeTopology); - } - - MiniDFSCluster dfs = builder.build(); - - // Configure contents of the filesystem - org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); - - FileSystem fs; - if (haEnabled) { - dfs.transitionToActive(0); - fs = HATestUtil.configureFailoverFs(dfs, cfg); - } else { - fs = dfs.getFileSystem(); - } - - try { - // Set the elasticsearch user directory up - fs.mkdirs(esUserPath); - if (UserGroupInformation.isSecurityEnabled()) { - List acls = new ArrayList<>(); - 
acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); - fs.modifyAclEntries(esUserPath, acls); - } - - // Install a pre-existing repository into HDFS - String directoryName = "readonly-repository"; - String archiveName = directoryName + ".tar.gz"; - URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName); - if (readOnlyRepositoryArchiveURL != null) { - Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName()); - File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); - FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); - FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); - - fs.copyFromLocalFile( - true, - true, - new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), - esUserPath.suffix("/existing/" + directoryName) - ); - - FileUtils.deleteDirectory(tempDirectory.toFile()); - } - } finally { - fs.close(); - } - - // write our PID file - Path tmp = Files.createTempFile(baseDir, null, null); - String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; - Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - - // write our port file - String portFileContent = Integer.toString(dfs.getNameNodePort(0)); - if (haEnabled) { - portFileContent = portFileContent + "\n" + Integer.toString(dfs.getNameNodePort(1)); - } - tmp = Files.createTempFile(baseDir, null, null); - Files.write(tmp, portFileContent.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - } - -} diff --git a/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz deleted file mode 100644 index 2cdb6d77c07d0..0000000000000 Binary files a/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz and /dev/null differ diff --git a/test/fixtures/krb5kdc-fixture/build.gradle b/test/fixtures/krb5kdc-fixture/build.gradle index ab9b4a32ed322..c671d58e1e395 100644 --- a/test/fixtures/krb5kdc-fixture/build.gradle +++ b/test/fixtures/krb5kdc-fixture/build.gradle @@ -8,37 +8,22 @@ import org.gradle.api.services.internal.BuildServiceRegistryInternal * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.java' +apply plugin: 'elasticsearch.cache-test-fixtures' +apply plugin: 'elasticsearch.deploy-test-fixtures' -List services = ["peppa", "hdfs"] - -tasks.named("preProcessFixture").configure { - doLast { - // We need to create these up-front because if docker creates them they will be owned by root and we won't be - // able to clean them up - services.each { fixturesDir.dir("shared/${it}").get().getAsFile().mkdirs() } - } -} - -tasks.named("postProcessFixture").configure { task -> - inputs.dir(fixturesDir.dir('shared').get().getAsFile()) - services.each { service -> - File confTemplate = fixturesDir.file("shared/${service}/krb5.conf.template").get().asFile - File confFile = fixturesDir.file("shared/${service}/krb5.conf").get().asFile - outputs.file(confFile) - doLast { - assert confTemplate.exists() - String confContents = confTemplate.text - .replace("\${MAPPED_PORT}", "${ext."test.fixtures.${service}.udp.88"}") - confFile.text = confContents - } +dockerFixtures { + krb5dc { + dockerContext = projectDir + version = "1.0" + baseImages = ["ubuntu:14.04"] } } -project.ext.krb5Conf = { s -> file("$testFixturesDir/shared/${s}/krb5.conf") } -project.ext.krb5Keytabs = { s, fileName -> file("$testFixturesDir/shared/${s}/keytabs/${fileName}") } - configurations { + all { + transitive = false + } krb5ConfHdfsFile { canBeConsumed = true canBeResolved = false @@ -49,11 +34,24 @@ configurations { } } -artifacts { - krb5ConfHdfsFile(krb5Conf('hdfs')) { - builtBy("postProcessFixture") - } - krb5KeytabsHdfsDir(file("$testFixturesDir/shared/hdfs/keytabs/")) { - builtBy("postProcessFixture") - } +dependencies { + testImplementation project(':test:framework') + + api "junit:junit:${versions.junit}" + api project(':test:fixtures:testcontainer-utils') + api "org.testcontainers:testcontainers:${versions.testcontainer}" + implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + implementation "org.slf4j:slf4j-api:${versions.slf4j}" + implementation "com.github.docker-java:docker-java-api:${versions.dockerJava}" + implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" + + runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" + runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" + runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + + // ensure we have proper logging during when used in tests + runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" } diff --git a/test/fixtures/krb5kdc-fixture/docker-compose.yml b/test/fixtures/krb5kdc-fixture/docker-compose.yml deleted file mode 100644 index 9e2d67000532e..0000000000000 --- a/test/fixtures/krb5kdc-fixture/docker-compose.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: '3' -services: - peppa: - hostname: kerberos.build.elastic.co - build: - context: . - dockerfile: Dockerfile - extra_hosts: - - "kerberos.build.elastic.co:127.0.0.1" - command: "bash /fixture/src/main/resources/provision/peppa.sh" - volumes: - - ./testfixtures_shared/shared/peppa:/fixture/build - # containers have bad entropy so mount /dev/urandom. Less secure but this is a test fixture. 
- - /dev/urandom:/dev/random - ports: - - "4444" - - "88/udp" - hdfs: - hostname: kerberos.build.elastic.co - build: - context: . - dockerfile: Dockerfile - extra_hosts: - - "kerberos.build.elastic.co:127.0.0.1" - command: "bash /fixture/src/main/resources/provision/hdfs.sh" - volumes: - - ./testfixtures_shared/shared/hdfs:/fixture/build - # containers have bad entropy so mount /dev/urandom. Less secure but this is a test fixture. - - /dev/urandom:/dev/random - ports: - - "4444" - - "88/udp" diff --git a/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java b/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java new file mode 100644 index 0000000000000..fa75b57ea87a6 --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test.fixtures.krb5kdc; + +import com.github.dockerjava.api.model.ExposedPort; +import com.github.dockerjava.api.model.Ports; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.junit.rules.TemporaryFolder; +import org.testcontainers.containers.Network; +import org.testcontainers.images.RemoteDockerImage; +import org.testcontainers.shaded.org.apache.commons.io.IOUtils; +import org.testcontainers.utility.MountableFile; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public final class Krb5kDcContainer extends DockerEnvironmentAwareTestContainer { + public static final String DOCKER_BASE_IMAGE = "docker.elastic.co/elasticsearch-dev/krb5dc-fixture:1.0"; + private final TemporaryFolder temporaryFolder = new TemporaryFolder(); + private final ProvisioningId provisioningId; + private Path krb5ConfFile; + private Path keytabFile; + private Path esKeytabFile; + + public enum ProvisioningId { + HDFS( + "hdfs", + "/fixture/src/main/resources/provision/hdfs.sh", + "/fixture/build/keytabs/hdfs_hdfs.build.elastic.co.keytab", + "/fixture/build/keytabs/elasticsearch.keytab", + "hdfs/hdfs.build.elastic.co@BUILD.ELASTIC.CO" + ), + PEPPA( + "peppa", + "/fixture/src/main/resources/provision/peppa.sh", + "/fixture/build/keytabs/peppa.keytab", + "/fixture/build/keytabs/HTTP_localhost.keytab", + "peppa@BUILD.ELASTIC.CO" + ); + + private final String id; + private final String scriptPath; + private final String keytabPath; + public final String esKeytab; + private final String keytabPrincipal; + + ProvisioningId(String id, String scriptPath, String keytabPath, String esKeytab, String principal) { + this.id = id; + this.scriptPath = scriptPath; + this.keytabPath = keytabPath; + this.esKeytab = esKeytab; + this.keytabPrincipal = principal; + } + } + + public Krb5kDcContainer() { + this(ProvisioningId.HDFS); + } + + public Krb5kDcContainer(ProvisioningId provisioningId) { + super(new RemoteDockerImage(DOCKER_BASE_IMAGE)); + this.provisioningId = provisioningId; + withNetwork(Network.newNetwork()); + 
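
As a rough sketch of how this container and the new `HdfsFixture` are meant to compose in a secure test, the wiring below starts the KDC first so that the keytab and principal suppliers resolve only after provisioning has run. The class name and rule ordering are assumptions; only the `Krb5kDcContainer` and `HdfsFixture` methods themselves come from this change.

```java
import org.elasticsearch.test.fixtures.hdfs.HdfsFixture;
import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;

// Hypothetical setup class for secure HDFS repository tests.
public class SecureHdfsTestSetup {

    // HDFS provisioning renders the hdfs/hdfs.build.elastic.co and elasticsearch keytabs inside the container.
    public static final Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(Krb5kDcContainer.ProvisioningId.HDFS);

    // The suppliers are resolved lazily by the fixture, i.e. after the KDC container is already up.
    public static final HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(
        krb5Fixture::getPrincipal,
        krb5Fixture::getKeytab
    );

    @ClassRule
    public static final TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture);
}
```

Starting the container also sets `java.security.krb5.conf` (see the `start()` override below), so the JVM picks up the rendered krb5.conf before any Kerberos login is attempted.
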
addExposedPorts(88, 4444); + withCreateContainerCmdModifier(cmd -> { + // Add previously exposed ports and UDP port + List exposedPorts = new ArrayList<>(); + for (ExposedPort p : cmd.getExposedPorts()) { + exposedPorts.add(p); + } + exposedPorts.add(ExposedPort.udp(88)); + cmd.withExposedPorts(exposedPorts); + + // Add previous port bindings and UDP port binding + Ports ports = cmd.getPortBindings(); + ports.bind(ExposedPort.udp(88), Ports.Binding.empty()); + cmd.withPortBindings(ports); + }); + withNetworkAliases("kerberos.build.elastic.co", "build.elastic.co"); + withCopyFileToContainer(MountableFile.forHostPath("/dev/urandom"), "/dev/random"); + withExtraHost("kerberos.build.elastic.co", "127.0.0.1"); + withCommand("bash", provisioningId.scriptPath); + } + + @Override + public void start() { + try { + temporaryFolder.create(); + } catch (IOException e) { + throw new RuntimeException(e); + } + super.start(); + System.setProperty("java.security.krb5.conf", getConfPath().toString()); + } + + @Override + public void stop() { + super.stop(); + System.clearProperty("java.security.krb5.conf"); + temporaryFolder.delete(); + } + + @SuppressWarnings("all") + public String getConf() { + var bindings = Arrays.asList(getCurrentContainerInfo().getNetworkSettings().getPorts().getBindings().get(ExposedPort.udp(88))) + .stream() + .findFirst(); + String hostPortSpec = bindings.get().getHostPortSpec(); + String s = copyFileFromContainer("/fixture/build/krb5.conf.template", i -> IOUtils.toString(i, StandardCharsets.UTF_8)); + return s.replace("${MAPPED_PORT}", hostPortSpec); + } + + public Path getKeytab() { + if (keytabFile != null) { + return keytabFile; + } + try { + String keytabPath = provisioningId.keytabPath; + keytabFile = temporaryFolder.newFile(provisioningId.id + ".keytab").toPath(); + copyFileFromContainer(keytabPath, keytabFile.toAbsolutePath().toString()); + return keytabFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Path getEsKeytab() { + if (esKeytabFile != null) { + return esKeytabFile; + } + try { + esKeytabFile = temporaryFolder.newFile("elasticsearch.keytab").toPath(); + copyFileFromContainer(provisioningId.esKeytab, esKeytabFile.toAbsolutePath().toString()); + return esKeytabFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Path getConfPath() { + if (krb5ConfFile != null) { + return krb5ConfFile; + } + try { + krb5ConfFile = temporaryFolder.newFile("krb5.conf").toPath(); + Files.write(krb5ConfFile, getConf().getBytes(StandardCharsets.UTF_8)); + return krb5ConfFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public String getPrincipal() { + return provisioningId.keytabPrincipal; + } + + public String getEsPrincipal() { + return "elasticsearch@BUILD.ELASTIC.CO"; + } +} diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh index ef5bba076444c..de08a52df3306 100644 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh @@ -8,4 +8,4 @@ addprinc.sh "hdfs/hdfs.build.elastic.co" # Use this as a signal that setup is complete python3 -m http.server 4444 & -sleep infinity \ No newline at end of file +sleep infinity diff --git a/test/fixtures/s3-fixture/build.gradle b/test/fixtures/s3-fixture/build.gradle index 0f031ee029f75..fc044c6dc6aa6 100644 --- a/test/fixtures/s3-fixture/build.gradle +++ 
b/test/fixtures/s3-fixture/build.gradle @@ -8,7 +8,6 @@ apply plugin: 'elasticsearch.java' description = 'Fixture for S3 Storage service' -//tasks.named("test").configure { enabled = false } dependencies { api project(':server') diff --git a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java index 11063dc4cf10b..1b47039f9ac5c 100644 --- a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java @@ -64,10 +64,16 @@ public DockerEnvironmentAwareTestContainer(Future image) { public void start() { Assume.assumeFalse("Docker support excluded on OS", EXCLUDED_OS); Assume.assumeTrue("Docker probing succesful", DOCKER_PROBING_SUCCESSFUL); - withLogConsumer(new Slf4jLogConsumer(logger())); + withLogConsumer(new Slf4jLogConsumer(LOGGER)); super.start(); } + @Override + public void stop() { + LOGGER.info("Stopping container {}", getContainerId()); + super.stop(); + } + @Override public void cache() { try { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 7f39120e83c07..4e43cb33111a1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -240,6 +240,10 @@ protected static int defaultInt(Setting setting) { // Then a commit of the new leader's first cluster state + DEFAULT_CLUSTER_STATE_UPDATE_DELAY; + /** + * An estimate for the max time needed to stabilize a cluster. Takes into account delays for various communications involved in + * leader elections. + * */ public static final long DEFAULT_STABILISATION_TIME = // If leader just blackholed, need to wait for this to be detected (defaultMillis(LEADER_CHECK_INTERVAL_SETTING) + defaultMillis(LEADER_CHECK_TIMEOUT_SETTING)) * defaultInt( @@ -549,6 +553,9 @@ private void updateCommittedStates() { } } + /** + * Uses a default period of time in which to wait for cluster stabilisation, and then verifies that a master has been elected. + */ public void stabilise() { stabilise(DEFAULT_STABILISATION_TIME, true); } @@ -942,6 +949,9 @@ protected long transportDelayMillis(String actionName) { return 0; } + /** + * Mimics a cluster node for testing. 
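
Since the new Javadoc on `DEFAULT_STABILISATION_TIME`, `stabilise()` and `ClusterNode` describes the moving parts without showing them together, here is a minimal sketch of the pattern a concrete subclass of `AbstractCoordinatorTestCase` typically follows; the test name and node count are illustrative.

```java
// Hypothetical subclass, for illustration of the run-randomly-then-stabilise lifecycle.
public class ExampleCoordinationTests extends AbstractCoordinatorTestCase {

    public void testClusterElectsLeaderAfterDisruption() {
        // Cluster is Releasable, so try-with-resources tears the simulated nodes down.
        try (Cluster cluster = new Cluster(randomIntBetween(1, 5))) {
            cluster.runRandomly();   // drive the cluster through random disruptions and restarts
            cluster.stabilise();     // wait DEFAULT_STABILISATION_TIME, then verify a master was elected
            assertNotNull(cluster.getAnyLeader());
        }
    }
}
```
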
+ */ public final class ClusterNode { private static final Logger logger = LogManager.getLogger(ClusterNode.class); diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 481ca207a71cf..52614dee8d04a 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -144,11 +144,18 @@ public MockBigArrays(PageCacheRecycler recycler, ByteSizeValue limit) { when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new LimitedBreaker(CircuitBreaker.REQUEST, limit)); } + /** + * Create {@linkplain BigArrays} with a provided breaker service. The breaker is not enable by default. + */ public MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService) { this(recycler, breakerService, false); } - private MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService, boolean checkBreaker) { + /** + * Create {@linkplain BigArrays} with a provided breaker service. The breaker can be enabled with the + * {@code checkBreaker} flag. + */ + public MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService, boolean checkBreaker) { super(recycler, breakerService, CircuitBreaker.REQUEST, checkBreaker); this.recycler = recycler; this.breakerService = breakerService; diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index ea97bafc5e4c8..675b5959f35a3 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -64,6 +65,18 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC protected abstract String typeName(); + /** + * Add the provided document to the provided writer, and randomly flush. + * This is useful for situations where there are not enough documents indexed to trigger random flush and commit performed + * by {@link RandomIndexWriter}. Flushing is important to obtain multiple slices and inter-segment concurrency. 
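
To make the intent of the new `addDocument` helper concrete, here is a sketch of how a subclass might index with it. The `_source` contents and the omitted query step are made up, and the snippet assumes the usual Lucene test imports (`Directory`, `RandomIndexWriter`, `DirectoryReader`, `IndexableField`, `StoredField`, `BytesRef`, `java.util.List`).

```java
// Assumed to live in a concrete subclass of AbstractScriptFieldTypeTestCase.
public void testQueryAcrossRandomlyFlushedSegments() throws IOException {
    try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
        // Each call may flush, so the documents can end up spread over several segments,
        // which is what exercises slicing and inter-segment concurrency in the query under test.
        for (int value = 0; value < 4; value++) {
            List<IndexableField> doc = List.of(new StoredField("_source", new BytesRef("{\"foo\": " + value + "}")));
            addDocument(iw, doc);
        }
        try (DirectoryReader reader = iw.getReader()) {
            // ... build the runtime field query under test and assert on its hit count ...
        }
    }
}
```
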
+ */ + protected static void addDocument(RandomIndexWriter iw, Iterable indexableFields) throws IOException { + iw.addDocument(indexableFields); + if (randomBoolean()) { + iw.flush(); + } + } + public final void testMinimalSerializesToItself() throws IOException { XContentBuilder orig = JsonXContent.contentBuilder().startObject(); createMapperService(runtimeFieldMapping(this::minimalMapping)).documentMapper().mapping().toXContent(orig, ToXContent.EMPTY_PARAMS); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 43ac8057a3fc0..fa0f0e1b95f54 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1030,8 +1030,15 @@ public final void testMinimalIsInvalidInRoutingPath() throws IOException { } } - protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { - return "All fields that match routing_path must be keywords with [time_series_dimension: true] " + private String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { + if (mapper instanceof FieldMapper fieldMapper && fieldMapper.fieldType().isDimension() == false) { + return "All fields that match routing_path must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + mapper.name() + + "] was not a dimension."; + } + return "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. 
[" + mapper.name() diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java index 16cb0b4656fcf..61fc190e4952d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java @@ -8,18 +8,32 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.core.Predicates; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import java.util.function.Function; -import java.util.function.Predicate; public class MockFieldFilterPlugin extends Plugin implements MapperPlugin { @Override - public Function> getFieldFilter() { + public Function getFieldFilter() { // this filter doesn't filter any field out, but it's used to exercise the code path executed when the filter is not no-op - return index -> Predicates.always(); + return index -> new FieldPredicate() { + @Override + public boolean test(String field) { + return true; + } + + @Override + public String modifyHash(String hash) { + return hash + ":includeall"; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 81848b5a50114..c60a913a63b33 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -253,12 +253,8 @@ public void testDimension() throws IOException { // dimension = false is allowed assertDimension(false, NumberFieldMapper.NumberFieldType::isDimension); - // dimension = true is not allowed - Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> { - minimalMapping(b); - b.field("time_series_dimension", true); - }))); - assertThat(e.getCause().getMessage(), containsString("Parameter [time_series_dimension] cannot be set")); + // dimension = true is allowed + assertDimension(true, NumberFieldMapper.NumberFieldType::isDimension); } public void testMetricType() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 77b25efd56b35..91bee1ee253e9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -7,36 +7,81 @@ */ package org.elasticsearch.search; +import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Response; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.text.Text; import 
org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.SearchProfileDfsPhaseResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.CollectorResult; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.InstantiatingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public enum SearchResponseUtils { ; + // All fields on the root level of the parsed SearchHit are interpreted as metadata fields + // public because we use it in a completion suggestion option + @SuppressWarnings("unchecked") + public static final ObjectParser.UnknownFieldConsumer> unknownMetaFieldConsumer = (map, fieldName, fieldValue) -> { + Map fieldMap = (Map) map.computeIfAbsent( + SearchHit.METADATA_FIELDS, + v -> new HashMap() + ); + if (fieldName.equals(IgnoredFieldMapper.NAME)) { + fieldMap.put(fieldName, new DocumentField(fieldName, (List) fieldValue)); + } else { + fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue))); + } + }; + public static TotalHits getTotalHits(SearchRequestBuilder request) { var resp = request.get(); try { @@ -180,13 +225,13 @@ public static SearchResponse parseInnerSearchResponse(XContentParser parser) thr } } else if (token == XContentParser.Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { - hits = SearchHits.fromXContent(parser); + hits = parseSearchHits(parser); } else if (InternalAggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { aggs = InternalAggregations.fromXContent(parser); } else if (Suggest.NAME.equals(currentFieldName)) { - suggest = 
Suggest.fromXContent(parser); + suggest = parseSuggest(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { - profile = SearchProfileResults.fromXContent(parser); + profile = parseSearchProfileResults(parser); } else if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -389,4 +434,465 @@ private static SearchResponse.Cluster parseCluster(String clusterAlias, XContent timedOut ); } + + public static SearchProfileResults parseSearchProfileResults(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + Map profileResults = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_ARRAY) { + if (SearchProfileResults.SHARDS_FIELD.equals(parser.currentName())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + parseProfileResultsEntry(parser, profileResults); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + } + } + return new SearchProfileResults(profileResults); + } + + private static void parseProfileResultsEntry(XContentParser parser, Map searchProfileResults) + throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = null; + List queryProfileResults = new ArrayList<>(); + AggregationProfileShardResult aggProfileShardResult = null; + ProfileResult fetchResult = null; + String id = null; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SearchProfileResults.ID_FIELD.equals(currentFieldName)) { + id = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("searches".equals(currentFieldName)) { + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + queryProfileResults.add(parseQueryProfileShardResult(parser)); + } + } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { + aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("dfs".equals(currentFieldName)) { + searchProfileDfsPhaseResult = parseProfileDfsPhaseResult(parser); + } else if ("fetch".equals(currentFieldName)) { + fetchResult = ProfileResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + SearchProfileShardResult result = new SearchProfileShardResult( + new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), + fetchResult + ); + result.getQueryPhase().setSearchProfileDfsPhaseResult(searchProfileDfsPhaseResult); + searchProfileResults.put(id, result); + } + + private static final InstantiatingObjectParser PROFILE_DFS_PHASE_RESULT_PARSER; + + static { + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "search_profile_dfs_phase_result", + true, + 
SearchProfileDfsPhaseResult.class + ); + parser.declareObject(optionalConstructorArg(), (p, c) -> ProfileResult.fromXContent(p), SearchProfileDfsPhaseResult.STATISTICS); + parser.declareObjectArray(optionalConstructorArg(), (p, c) -> parseQueryProfileShardResult(p), SearchProfileDfsPhaseResult.KNN); + PROFILE_DFS_PHASE_RESULT_PARSER = parser.build(); + } + + public static SearchProfileDfsPhaseResult parseProfileDfsPhaseResult(XContentParser parser) throws IOException { + return PROFILE_DFS_PHASE_RESULT_PARSER.parse(parser, null); + } + + public static QueryProfileShardResult parseQueryProfileShardResult(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + String currentFieldName = null; + List queryProfileResults = new ArrayList<>(); + long rewriteTime = 0; + Long vectorOperationsCount = null; + CollectorResult collector = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (QueryProfileShardResult.REWRITE_TIME.equals(currentFieldName)) { + rewriteTime = parser.longValue(); + } else if (QueryProfileShardResult.VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { + vectorOperationsCount = parser.longValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (QueryProfileShardResult.QUERY_ARRAY.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + queryProfileResults.add(ProfileResult.fromXContent(parser)); + } + } else if (QueryProfileShardResult.COLLECTOR.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + collector = CollectorResult.fromXContent(parser); + } + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); + } + + public static SearchHits parseSearchHits(XContentParser parser) throws IOException { + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + } + XContentParser.Token token = parser.currentToken(); + String currentFieldName = null; + List hits = new ArrayList<>(); + TotalHits totalHits = null; + float maxScore = 0f; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SearchHits.Fields.TOTAL.equals(currentFieldName)) { + // For BWC with nodes pre 7.0 + long value = parser.longValue(); + totalHits = value == -1 ? 
null : new TotalHits(value, TotalHits.Relation.EQUAL_TO); + } else if (SearchHits.Fields.MAX_SCORE.equals(currentFieldName)) { + maxScore = parser.floatValue(); + } + } else if (token == XContentParser.Token.VALUE_NULL) { + if (SearchHits.Fields.MAX_SCORE.equals(currentFieldName)) { + maxScore = Float.NaN; // NaN gets rendered as null-field + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SearchHits.Fields.HITS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + hits.add(parseSearchHit(parser)); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (SearchHits.Fields.TOTAL.equals(currentFieldName)) { + totalHits = SearchHits.parseTotalHitsFragment(parser); + } else { + parser.skipChildren(); + } + } + } + return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); + } + + /** + * This parser outputs a temporary map of the objects needed to create the + * SearchHit instead of directly creating the SearchHit. The reason for this + * is that this way we can reuse the parser when parsing xContent from + * {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines + * the output of + * {@link SearchHit#toInnerXContent(XContentBuilder, org.elasticsearch.xcontent.ToXContent.Params)} + * of the included search hit. The output of the map is used to create the + * actual SearchHit instance via {@link SearchResponseUtils#searchHitFromMap(Map)} + */ + static final ObjectParser, Void> MAP_PARSER = new ObjectParser<>( + "innerHitParser", + unknownMetaFieldConsumer, + HashMap::new + ); + + static { + declareInnerHitsParseFields(MAP_PARSER); + } + + public static SearchHit parseSearchHit(XContentParser parser) { + return searchHitFromMap(MAP_PARSER.apply(parser, null)); + } + + public static void declareInnerHitsParseFields(ObjectParser, Void> parser) { + parser.declareString((map, value) -> map.put(SearchHit.Fields._INDEX, value), new ParseField(SearchHit.Fields._INDEX)); + parser.declareString((map, value) -> map.put(SearchHit.Fields._ID, value), new ParseField(SearchHit.Fields._ID)); + parser.declareString((map, value) -> map.put(SearchHit.Fields._NODE, value), new ParseField(SearchHit.Fields._NODE)); + parser.declareField( + (map, value) -> map.put(SearchHit.Fields._SCORE, value), + SearchResponseUtils::parseScore, + new ParseField(SearchHit.Fields._SCORE), + ObjectParser.ValueType.FLOAT_OR_NULL + ); + parser.declareInt((map, value) -> map.put(SearchHit.Fields._RANK, value), new ParseField(SearchHit.Fields._RANK)); + + parser.declareLong((map, value) -> map.put(SearchHit.Fields._VERSION, value), new ParseField(SearchHit.Fields._VERSION)); + parser.declareLong((map, value) -> map.put(SearchHit.Fields._SEQ_NO, value), new ParseField(SearchHit.Fields._SEQ_NO)); + parser.declareLong((map, value) -> map.put(SearchHit.Fields._PRIMARY_TERM, value), new ParseField(SearchHit.Fields._PRIMARY_TERM)); + parser.declareField( + (map, value) -> map.put(SearchHit.Fields._SHARD, value), + (p, c) -> ShardId.fromString(p.text()), + new ParseField(SearchHit.Fields._SHARD), + ObjectParser.ValueType.STRING + ); + parser.declareObject( + (map, value) -> map.put(SourceFieldMapper.NAME, value), + (p, c) -> parseSourceBytes(p), + new ParseField(SourceFieldMapper.NAME) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields.HIGHLIGHT, value), + (p, c) -> parseHighlightFields(p), + new 
ParseField(SearchHit.Fields.HIGHLIGHT) + ); + parser.declareObject((map, value) -> { + Map fieldMap = get(SearchHit.Fields.FIELDS, map, new HashMap<>()); + fieldMap.putAll(value); + map.put(SearchHit.DOCUMENT_FIELDS, fieldMap); + }, (p, c) -> parseFields(p), new ParseField(SearchHit.Fields.FIELDS)); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields._EXPLANATION, value), + (p, c) -> parseExplanation(p), + new ParseField(SearchHit.Fields._EXPLANATION) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.NestedIdentity._NESTED, value), + (p, ignored) -> parseNestedIdentity(p), + new ParseField(SearchHit.NestedIdentity._NESTED) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields.INNER_HITS, value), + (p, c) -> parseInnerHits(p), + new ParseField(SearchHit.Fields.INNER_HITS) + ); + + parser.declareField((p, map, context) -> { + XContentParser.Token token = p.currentToken(); + Map matchedQueries = new LinkedHashMap<>(); + if (token == XContentParser.Token.START_OBJECT) { + String fieldName = null; + while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token.isValue()) { + matchedQueries.put(fieldName, p.floatValue()); + } + } + } else if (token == XContentParser.Token.START_ARRAY) { + while (p.nextToken() != XContentParser.Token.END_ARRAY) { + matchedQueries.put(p.text(), Float.NaN); + } + } + map.put(SearchHit.Fields.MATCHED_QUERIES, matchedQueries); + }, new ParseField(SearchHit.Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); + + parser.declareField( + (map, list) -> map.put(SearchHit.Fields.SORT, list), + SearchSortValues::fromXContent, + new ParseField(SearchHit.Fields.SORT), + ObjectParser.ValueType.OBJECT_ARRAY + ); + } + + private static float parseScore(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER || parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return parser.floatValue(); + } else { + return Float.NaN; + } + } + + private static BytesReference parseSourceBytes(XContentParser parser) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { + // the original document gets slightly modified: whitespaces or + // pretty printing are not preserved, + // it all depends on the current builder settings + builder.copyCurrentStructure(parser); + return BytesReference.bytes(builder); + } + } + + private static Map parseFields(XContentParser parser) throws IOException { + Map fields = new HashMap<>(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + DocumentField field = DocumentField.fromXContent(parser); + fields.put(field.getName(), field); + } + return fields; + } + + private static Map parseInnerHits(XContentParser parser) throws IOException { + Map innerHits = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String name = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); + innerHits.put(name, parseSearchHits(parser)); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + } + return innerHits; + } + + private static Map parseHighlightFields(XContentParser parser) throws IOException { + 
Map highlightFields = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + HighlightField highlightField = HighlightField.fromXContent(parser); + highlightFields.put(highlightField.name(), highlightField); + } + return highlightFields; + } + + private static Explanation parseExplanation(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + XContentParser.Token token; + Float value = null; + String description = null; + List details = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + if (SearchHit.Fields.VALUE.equals(currentFieldName)) { + value = parser.floatValue(); + } else if (SearchHit.Fields.DESCRIPTION.equals(currentFieldName)) { + description = parser.textOrNull(); + } else if (SearchHit.Fields.DETAILS.equals(currentFieldName)) { + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + details.add(parseExplanation(parser)); + } + } else { + parser.skipChildren(); + } + } + if (value == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation value"); + } + if (description == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation description"); + } + return Explanation.match(value, description, details); + } + + /** + * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller + */ + public static Suggest parseSuggest(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + List>> suggestions = + new ArrayList<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String currentField = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + Suggest.Suggestion> suggestion = parseSuggestion( + parser + ); + if (suggestion != null) { + suggestions.add(suggestion); + } else { + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField) + ); + } + } + return new Suggest(suggestions); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static Suggest.Suggestion> parseSuggestion( + XContentParser parser + ) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + SetOnce suggestion = new SetOnce<>(); + XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggest.Suggestion.class, suggestion::set); + return suggestion.get(); + } + + private static final ConstructingObjectParser NESTED_IDENTITY_PARSER = new ConstructingObjectParser<>( + "nested_identity", + true, + ctorArgs -> new SearchHit.NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (SearchHit.NestedIdentity) ctorArgs[2]) + ); + static { + NESTED_IDENTITY_PARSER.declareString(constructorArg(), new ParseField(SearchHit.NestedIdentity.FIELD)); + NESTED_IDENTITY_PARSER.declareInt(constructorArg(), new ParseField(SearchHit.NestedIdentity.OFFSET)); + NESTED_IDENTITY_PARSER.declareObject( + optionalConstructorArg(), + 
NESTED_IDENTITY_PARSER, + new ParseField(SearchHit.NestedIdentity._NESTED) + ); + } + + public static SearchHit.NestedIdentity parseNestedIdentity(XContentParser parser) { + return NESTED_IDENTITY_PARSER.apply(parser, null); + } + + public static SearchHit searchHitFromMap(Map values) { + String id = get(SearchHit.Fields._ID, values, null); + String index = get(SearchHit.Fields._INDEX, values, null); + String clusterAlias = null; + if (index != null) { + int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + clusterAlias = index.substring(0, indexOf); + index = index.substring(indexOf + 1); + } + } + ShardId shardId = get(SearchHit.Fields._SHARD, values, null); + String nodeId = get(SearchHit.Fields._NODE, values, null); + final SearchShardTarget shardTarget; + if (shardId != null && nodeId != null) { + assert shardId.getIndexName().equals(index); + shardTarget = new SearchShardTarget(nodeId, shardId, clusterAlias); + index = shardTarget.getIndex(); + clusterAlias = shardTarget.getClusterAlias(); + } else { + shardTarget = null; + } + return new SearchHit( + -1, + get(SearchHit.Fields._SCORE, values, SearchHit.DEFAULT_SCORE), + get(SearchHit.Fields._RANK, values, SearchHit.NO_RANK), + id == null ? null : new Text(id), + get(SearchHit.NestedIdentity._NESTED, values, null), + get(SearchHit.Fields._VERSION, values, -1L), + get(SearchHit.Fields._SEQ_NO, values, SequenceNumbers.UNASSIGNED_SEQ_NO), + get(SearchHit.Fields._PRIMARY_TERM, values, SequenceNumbers.UNASSIGNED_PRIMARY_TERM), + get(SourceFieldMapper.NAME, values, null), + get(SearchHit.Fields.HIGHLIGHT, values, null), + get(SearchHit.Fields.SORT, values, SearchSortValues.EMPTY), + get(SearchHit.Fields.MATCHED_QUERIES, values, null), + get(SearchHit.Fields._EXPLANATION, values, null), + shardTarget, + index, + clusterAlias, + null, + get(SearchHit.Fields.INNER_HITS, values, null), + get(SearchHit.DOCUMENT_FIELDS, values, Collections.emptyMap()), + get(SearchHit.METADATA_FIELDS, values, Collections.emptyMap()), + RefCounted.ALWAYS_REFERENCED // TODO: do we ever want pooling here? + ); + } + + @SuppressWarnings("unchecked") + private static T get(String key, Map map, T defaultValue) { + return (T) map.getOrDefault(key, defaultValue); + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/SnapshotInfoUtils.java b/test/framework/src/main/java/org/elasticsearch/snapshots/SnapshotInfoUtils.java new file mode 100644 index 0000000000000..76a31c64ebd4e --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/SnapshotInfoUtils.java @@ -0,0 +1,244 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.snapshots.SnapshotInfo.DATA_STREAMS; +import static org.elasticsearch.snapshots.SnapshotInfo.END_TIME_IN_MILLIS; +import static org.elasticsearch.snapshots.SnapshotInfo.FAILURES; +import static org.elasticsearch.snapshots.SnapshotInfo.FEATURE_STATES; +import static org.elasticsearch.snapshots.SnapshotInfo.INCLUDE_GLOBAL_STATE; +import static org.elasticsearch.snapshots.SnapshotInfo.INDEX_DETAILS; +import static org.elasticsearch.snapshots.SnapshotInfo.INDICES; +import static org.elasticsearch.snapshots.SnapshotInfo.REASON; +import static org.elasticsearch.snapshots.SnapshotInfo.REPOSITORY; +import static org.elasticsearch.snapshots.SnapshotInfo.SHARDS; +import static org.elasticsearch.snapshots.SnapshotInfo.START_TIME_IN_MILLIS; +import static org.elasticsearch.snapshots.SnapshotInfo.STATE; +import static org.elasticsearch.snapshots.SnapshotInfo.SUCCESSFUL; +import static org.elasticsearch.snapshots.SnapshotInfo.TOTAL; +import static org.elasticsearch.snapshots.SnapshotInfo.UNKNOWN_REPO_NAME; +import static org.elasticsearch.snapshots.SnapshotInfo.USER_METADATA; +import static org.elasticsearch.snapshots.SnapshotInfo.UUID; +import static org.elasticsearch.snapshots.SnapshotInfo.VERSION_ID; +import static org.elasticsearch.threadpool.ThreadPool.Names.SNAPSHOT; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class SnapshotInfoUtils { + + private SnapshotInfoUtils() {/* no instances */} + + static final ConstructingObjectParser CREATE_SNAPSHOT_RESPONSE_PARSER = new ConstructingObjectParser<>( + CreateSnapshotResponse.class.getName(), + true, + args -> new CreateSnapshotResponse(((SnapshotInfoBuilder) args[0]).build()) + ); + + static final ObjectParser SNAPSHOT_INFO_PARSER = new ObjectParser<>( + SnapshotInfoBuilder.class.getName(), + true, + SnapshotInfoBuilder::new + ); + + static final ConstructingObjectParser SHARD_STATS_PARSER = new ConstructingObjectParser<>( + ShardStatsBuilder.class.getName(), + true, + args -> new ShardStatsBuilder((int) Objects.requireNonNullElse(args[0], 0), (int) Objects.requireNonNullElse(args[1], 0)) + ); + + static { + SHARD_STATS_PARSER.declareInt(optionalConstructorArg(), new ParseField(TOTAL)); + SHARD_STATS_PARSER.declareInt(optionalConstructorArg(), new ParseField(SUCCESSFUL)); + + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setRepository, new ParseField(REPOSITORY)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); + SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); + 
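Illustrative sketch only, not part of this patch: a minimal way a test could feed a JSON snapshot body through the snapshotInfoFromXContent entry point defined later in this class. The helper class, method name, sample usage, and the String-based createParser overload are assumptions made for illustration.

import java.io.IOException;

import org.elasticsearch.snapshots.SnapshotInfo;
import org.elasticsearch.snapshots.SnapshotInfoUtils;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;

// Hypothetical test helper: parses a snapshot description (using the same field names that
// SNAPSHOT_INFO_PARSER declares above) back into a SnapshotInfo.
final class SnapshotInfoParsingSketch {
    static SnapshotInfo parseFromJson(String json) throws IOException {
        try (XContentParser parser = XContentType.JSON.xContent()
                .createParser(XContentParserConfiguration.EMPTY, json)) {
            // The parser is declared lenient (second constructor argument "true"),
            // so unknown fields in the body are skipped rather than rejected.
            return SnapshotInfoUtils.snapshotInfoFromXContent(parser);
        }
    }
}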
SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setDataStreams, new ParseField(DATA_STREAMS)); + SNAPSHOT_INFO_PARSER.declareObjectArray( + SnapshotInfoBuilder::setFeatureStates, + SnapshotFeatureInfo.SNAPSHOT_FEATURE_INFO_PARSER, + new ParseField(FEATURE_STATES) + ); + SNAPSHOT_INFO_PARSER.declareObject( + SnapshotInfoBuilder::setIndexSnapshotDetails, + (p, c) -> p.map(HashMap::new, p2 -> SnapshotInfo.IndexSnapshotDetails.PARSER.parse(p2, c)), + new ParseField(INDEX_DETAILS) + ); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); + SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setUserMetadata, (p, c) -> p.map(), new ParseField(USER_METADATA)); + SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); + SNAPSHOT_INFO_PARSER.declareObjectArray( + SnapshotInfoBuilder::setShardFailures, + SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, + new ParseField(FAILURES) + ); + + CREATE_SNAPSHOT_RESPONSE_PARSER.declareObject(optionalConstructorArg(), SNAPSHOT_INFO_PARSER, new ParseField("snapshot")); + } + + private record ShardStatsBuilder(int totalShards, int successfulShards) {} + + public static final class SnapshotInfoBuilder { + private String snapshotName = null; + private String snapshotUUID = null; + private String repository = UNKNOWN_REPO_NAME; + private String state = null; + private String reason = null; + private List indices = null; + private List dataStreams = null; + private List featureStates = null; + private Map indexSnapshotDetails = null; + private long startTime = 0L; + private long endTime = 0L; + private ShardStatsBuilder shardStatsBuilder = null; + private Boolean includeGlobalState = null; + private Map userMetadata = null; + private int version = -1; + private List shardFailures = null; + + private void setSnapshotName(String snapshotName) { + this.snapshotName = snapshotName; + } + + private void setSnapshotUUID(String snapshotUUID) { + this.snapshotUUID = snapshotUUID; + } + + private void setRepository(String repository) { + this.repository = repository; + } + + private void setState(String state) { + this.state = state; + } + + private void setReason(String reason) { + this.reason = reason; + } + + private void setIndices(List indices) { + this.indices = indices; + } + + private void setDataStreams(List dataStreams) { + this.dataStreams = dataStreams; + } + + private void setFeatureStates(List featureStates) { + this.featureStates = featureStates; + } + + private void setIndexSnapshotDetails(Map indexSnapshotDetails) { + this.indexSnapshotDetails = indexSnapshotDetails; + } + + private void setStartTime(long startTime) { + this.startTime = startTime; + } + + private void setEndTime(long endTime) { + this.endTime = endTime; + } + + private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { + this.shardStatsBuilder = shardStatsBuilder; + } + + private void setIncludeGlobalState(Boolean includeGlobalState) { + this.includeGlobalState = includeGlobalState; + } + + private void setUserMetadata(Map userMetadata) { + this.userMetadata = userMetadata; + } + + private void setVersion(int version) { + 
this.version = version; + } + + private void setShardFailures(List shardFailures) { + this.shardFailures = shardFailures; + } + + public SnapshotInfo build() { + final Snapshot snapshot = new Snapshot(repository, new SnapshotId(snapshotName, snapshotUUID)); + + if (indices == null) { + indices = Collections.emptyList(); + } + + if (dataStreams == null) { + dataStreams = Collections.emptyList(); + } + + if (featureStates == null) { + featureStates = Collections.emptyList(); + } + + if (indexSnapshotDetails == null) { + indexSnapshotDetails = Collections.emptyMap(); + } + + SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); + IndexVersion version = this.version == -1 ? IndexVersion.current() : IndexVersion.fromId(this.version); + + int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.totalShards(); + int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.successfulShards(); + + if (shardFailures == null) { + shardFailures = new ArrayList<>(); + } + + return new SnapshotInfo( + snapshot, + indices, + dataStreams, + featureStates, + reason, + version, + startTime, + endTime, + totalShards, + successfulShards, + shardFailures, + includeGlobalState, + userMetadata, + snapshotState, + indexSnapshotDetails + ); + } + } + + public static CreateSnapshotResponse createSnapshotResponseFromXContent(XContentParser parser) { + return CREATE_SNAPSHOT_RESPONSE_PARSER.apply(parser, null); + } + + public static SnapshotInfo snapshotInfoFromXContent(XContentParser parser) { + return SNAPSHOT_INFO_PARSER.apply(parser, null).build(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java index 5f720eededf02..751eed222ee7a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java @@ -10,12 +10,15 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -24,12 +27,38 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; public abstract class AbstractBroadcastResponseTestCase extends AbstractXContentTestCase { + private static final ParseField _SHARDS_FIELD = new ParseField("_shards"); + private static final ParseField TOTAL_FIELD = new ParseField("total"); + private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful"); + private static final 
ParseField FAILED_FIELD = new ParseField("failed"); + private static final ParseField FAILURES_FIELD = new ParseField("failures"); + + @SuppressWarnings("unchecked") + public static <T extends BaseBroadcastResponse> void declareBroadcastFields(ConstructingObjectParser<T, Void> PARSER) { + ConstructingObjectParser<BaseBroadcastResponse, Void> shardsParser = new ConstructingObjectParser<>( + "_shards", + true, + arg -> new BaseBroadcastResponse((int) arg[0], (int) arg[1], (int) arg[2], (List<DefaultShardOperationFailedException>) arg[3]) + ); + shardsParser.declareInt(constructorArg(), TOTAL_FIELD); + shardsParser.declareInt(constructorArg(), SUCCESSFUL_FIELD); + shardsParser.declareInt(constructorArg(), FAILED_FIELD); + shardsParser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> DefaultShardOperationFailedException.fromXContent(p), + FAILURES_FIELD + ); + PARSER.declareObject(constructorArg(), shardsParser, _SHARDS_FIELD); + } + @Override protected T createTestInstance() { int totalShards = randomIntBetween(1, 10); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 5e63dd2edefea..10a3a8a78e483 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -240,10 +240,18 @@ public String toString() { } } + /** + * Adds the list of class loggers to this {@link MockLogAppender}. + * + * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + */ public Releasable capturing(Class<?>... classes) { return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); } + /** + * Same as above except takes string class names of each logger. + */ public Releasable capturing(String... 
names) { return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 307daddd17c37..6905ee391a6eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; @@ -1312,7 +1313,7 @@ protected static BroadcastResponse refresh(String index) throws IOException { ); static { - BaseBroadcastResponse.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); + AbstractBroadcastResponseTestCase.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); } protected static BroadcastResponse refresh(RestClient client, String index) throws IOException { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java index de0d541c8535f..062bb3bd3fa5a 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java @@ -71,6 +71,15 @@ void validate() { if (nodeNames.isEmpty() == false) { throw new IllegalArgumentException("Cluster cannot contain nodes with duplicates names: " + nodeNames); } + + // Ensure we do not configure older version nodes with the integTest distribution + if (nodes.stream().anyMatch(n -> n.getVersion() != Version.CURRENT && n.getDistributionType() == DistributionType.INTEG_TEST)) { + throw new IllegalArgumentException( + "Error configuring test cluster '" + + name + + "'. When configuring a node for a prior Elasticsearch version, the default distribution type must be used." 
+ ); + } } public static class LocalNodeSpec { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java index 2d4a88c4369b5..de4df7eaaaf49 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java @@ -14,18 +14,19 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.function.Supplier; public class FileResource implements Resource { - private final Path file; + private final Supplier file; - FileResource(Path file) { + FileResource(Supplier file) { this.file = file; } @Override public InputStream asStream() { try { - return Files.newInputStream(file, StandardOpenOption.READ); + return Files.newInputStream(file.get(), StandardOpenOption.READ); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java index 829e34007044d..22dc3e7465426 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java @@ -32,6 +32,10 @@ static Resource fromClasspath(String path) { } static Resource fromFile(Path file) { + return fromFile(() -> file); + } + + static Resource fromFile(Supplier file) { return new FileResource(file); } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 804f4eae4042d..5ac83f94f6248 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -278,11 +278,18 @@ public static Iterable createParameters(NamedXContentRegistry executea return createParameters(executeableSectionRegistry, null); } + /** + * Create parameters for this parameterized test. + */ + public static Iterable createParameters(String[] testPaths, Map yamlParameters) throws Exception { + return createParameters(ExecutableSection.XCONTENT_REGISTRY, testPaths, yamlParameters); + } + /** * Create parameters for this parameterized test. */ public static Iterable createParameters(String[] testPaths) throws Exception { - return createParameters(ExecutableSection.XCONTENT_REGISTRY, testPaths); + return createParameters(testPaths, Collections.emptyMap()); } /** @@ -295,6 +302,23 @@ public static Iterable createParameters(String[] testPaths) throws Exc */ public static Iterable createParameters(NamedXContentRegistry executeableSectionRegistry, String[] testPaths) throws Exception { + return createParameters(executeableSectionRegistry, testPaths, Collections.emptyMap()); + } + + /** + * Create parameters for this parameterized test. + * + * @param executeableSectionRegistry registry of executable sections + * @param testPaths list of paths to explicitly search for tests. If null then include all tests in root path. 
+ * @param yamlParameters map or parameters used within the yaml specs to be replaced at parsing time. + * @return list of test candidates. + * @throws Exception + */ + public static Iterable createParameters( + NamedXContentRegistry executeableSectionRegistry, + String[] testPaths, + Map yamlParameters + ) throws Exception { if (testPaths != null && System.getProperty(REST_TESTS_SUITE) != null) { throw new IllegalArgumentException("The '" + REST_TESTS_SUITE + "' system property is not supported with explicit test paths."); } @@ -308,7 +332,7 @@ public static Iterable createParameters(NamedXContentRegistry executea for (String api : yamlSuites.keySet()) { List yamlFiles = new ArrayList<>(yamlSuites.get(api)); for (Path yamlFile : yamlFiles) { - ClientYamlTestSuite suite = ClientYamlTestSuite.parse(executeableSectionRegistry, api, yamlFile); + ClientYamlTestSuite suite = ClientYamlTestSuite.parse(executeableSectionRegistry, api, yamlFile, yamlParameters); suites.add(suite); try { suite.validate(); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java new file mode 100644 index 0000000000000..43ea4f9d665d0 --- /dev/null +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java @@ -0,0 +1,295 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.rest.yaml; + +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.nio.CharBuffer; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * a wrapper around YamlXContentParser that allows for parameter replacement in the yaml file + */ +public class ParameterizableYamlXContentParser implements XContentParser { + private final XContentParser delegate; + private final Map params; + + public ParameterizableYamlXContentParser(XContentParser delegate, Map params) { + this.delegate = delegate; + this.params = params.entrySet().stream().collect(Collectors.toMap(e -> "@" + e.getKey() + "@", Map.Entry::getValue)); + } + + @Override + public XContentType contentType() { + return delegate.contentType(); + } + + @Override + public void allowDuplicateKeys(boolean allowDuplicateKeys) { + delegate.allowDuplicateKeys(allowDuplicateKeys); + } + + @Override + public Token nextToken() throws IOException { + return delegate.nextToken(); + } + + @Override + @Nullable + public String nextFieldName() throws IOException { + return delegate.nextFieldName(); + } + + @Override + public void skipChildren() throws IOException { + delegate.skipChildren(); + } + + @Override + public Token currentToken() { + return delegate.currentToken(); + } + + @Override + public String currentName() throws IOException { + return delegate.currentName(); + } + + @Override + public Map map() throws IOException { + return delegate.map(); + } + + @Override + public Map mapOrdered() throws IOException { + return visitMapForParameterReplacements(delegate.mapOrdered()); + } + + private Map visitMapForParameterReplacements(Map stringObjectMap) { + var updatedMap = stringObjectMap.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> maybeReplaceParams(e.getValue()))); + return updatedMap; + } + + @SuppressWarnings("unchecked") + private Object maybeReplaceParams(Object inputValue) { + if (inputValue == null) { + return null; + } + if (inputValue instanceof Map) { + return visitMapForParameterReplacements((Map) inputValue); + } + if (inputValue instanceof String) { + if (((String) inputValue).matches(".*@.*@.*")) { + String value = (String) inputValue; + for (String s : params.keySet()) { + if (value.contains(s)) { + value = value.replace(s, params.get(s).toString()); + } + } + return value; + } + } + return inputValue; + } + + @Override + public Map mapStrings() throws IOException { + return delegate.mapStrings(); + } + + @Override + public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) + throws IOException { + return delegate.map(mapFactory, mapValueParser); + } + + @Override + public List list() throws IOException { + return delegate.list(); + } + + @Override + public List listOrderedMap() throws IOException { + return delegate.listOrderedMap(); + } + + @Override + public String text() throws IOException { + return delegate.text(); + } + + @Override + public String textOrNull() throws IOException { + return delegate.textOrNull(); + } + + @Override + public CharBuffer charBufferOrNull() throws IOException { + 
return delegate.charBufferOrNull(); + } + + @Override + public CharBuffer charBuffer() throws IOException { + return delegate.charBuffer(); + } + + @Override + public Object objectText() throws IOException { + return delegate.objectText(); + } + + @Override + public Object objectBytes() throws IOException { + return delegate.objectBytes(); + } + + @Override + public boolean hasTextCharacters() { + return delegate.hasTextCharacters(); + } + + @Override + public char[] textCharacters() throws IOException { + return delegate.textCharacters(); + } + + @Override + public int textLength() throws IOException { + return delegate.textLength(); + } + + @Override + public int textOffset() throws IOException { + return delegate.textOffset(); + } + + @Override + public Number numberValue() throws IOException { + return delegate.numberValue(); + } + + @Override + public NumberType numberType() throws IOException { + return delegate.numberType(); + } + + @Override + public short shortValue(boolean coerce) throws IOException { + return delegate.shortValue(coerce); + } + + @Override + public int intValue(boolean coerce) throws IOException { + return delegate.intValue(coerce); + } + + @Override + public long longValue(boolean coerce) throws IOException { + return delegate.longValue(coerce); + } + + @Override + public float floatValue(boolean coerce) throws IOException { + return delegate.floatValue(coerce); + } + + @Override + public double doubleValue(boolean coerce) throws IOException { + return delegate.doubleValue(coerce); + } + + @Override + public short shortValue() throws IOException { + return delegate.shortValue(); + } + + @Override + public int intValue() throws IOException { + return delegate.intValue(); + } + + @Override + public long longValue() throws IOException { + return delegate.longValue(); + } + + @Override + public float floatValue() throws IOException { + return delegate.floatValue(); + } + + @Override + public double doubleValue() throws IOException { + return delegate.doubleValue(); + } + + @Override + public boolean isBooleanValue() throws IOException { + return delegate.isBooleanValue(); + } + + @Override + public boolean booleanValue() throws IOException { + return delegate.booleanValue(); + } + + @Override + public byte[] binaryValue() throws IOException { + return delegate.binaryValue(); + } + + @Override + public XContentLocation getTokenLocation() { + return delegate.getTokenLocation(); + } + + @Override + public T namedObject(Class categoryClass, String name, Object context) throws IOException { + return getXContentRegistry().parseNamedObject(categoryClass, name, this, context); + } + + @Override + public NamedXContentRegistry getXContentRegistry() { + return delegate.getXContentRegistry(); + } + + @Override + public boolean isClosed() { + return delegate.isClosed(); + } + + @Override + public RestApiVersion getRestApiVersion() { + return delegate.getRestApiVersion(); + } + + @Override + public DeprecationHandler getDeprecationHandler() { + return delegate.getDeprecationHandler(); + } + + @Override + public void close() throws IOException { + delegate.close(); + } + +} diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java index fb9918e1f85f1..2ce35888c3f14 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java +++ 
b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java @@ -19,6 +19,14 @@ * to support BwC. Rather than leaving them in the main src we group them here, so it's clear they are not used in production code anymore. */ public class YamlTestLegacyFeatures implements FeatureSpecification { + private static final NodeFeature BULK_AUTO_ID = new NodeFeature("bulk_auto_id"); + private static final NodeFeature BULK_REQUIRE_ALIAS = new NodeFeature("bulk_require_alias"); + private static final NodeFeature BULK_DYNAMIC_TEMPLATE_OP_TYPE = new NodeFeature("bulk_dynamic_template_op_type"); + private static final NodeFeature BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION = new NodeFeature( + "bulk_dynamic_template_document_parse_exception" + ); + + private static final NodeFeature BULK_PIPELINE_VALIDATE = new NodeFeature("bulk_pipeline_validate"); private static final NodeFeature CAT_ALIASES_SHOW_WRITE_INDEX = new NodeFeature("cat_aliases_show_write_index"); private static final NodeFeature CAT_ALIASES_HIDDEN = new NodeFeature("cat_aliases_hidden"); @@ -27,7 +35,6 @@ public class YamlTestLegacyFeatures implements FeatureSpecification { private static final NodeFeature CAT_ALLOCATION_NODE_ROLE = new NodeFeature("cat_allocation_node_role"); private static final NodeFeature CAT_INDICES_DATASET_SIZE = new NodeFeature("cat_indices_dataset_size"); - private static final NodeFeature CAT_INDICES_REPLICATE_CLOSED = new NodeFeature("cat_indices_replicate_closed"); private static final NodeFeature CAT_INDICES_VALIDATE_HEALTH_PARAM = new NodeFeature("cat_indices_validate_health_param"); private static final NodeFeature CAT_PLUGINS_NEW_FORMAT = new NodeFeature("cat_plugins_new_format"); @@ -39,19 +46,62 @@ public class YamlTestLegacyFeatures implements FeatureSpecification { private static final NodeFeature CAT_TASKS_X_OPAQUE_ID = new NodeFeature("cat_tasks_x_opaque_id"); - private static final NodeFeature CAT_TEMPLATES_V2 = new NodeFeature("cat_templates_v2"); private static final NodeFeature CAT_TEMPLATE_NAME_VALIDATION = new NodeFeature("cat_template_name_validation"); + private static final NodeFeature CLUSTER_TEMPLATES_DELETE_MULTIPLE = new NodeFeature("cluster_templates_delete_multiple"); + + private static final NodeFeature CLUSTER_ALLOCATION_ROLE = new NodeFeature("cluster_allocation_role"); + + private static final NodeFeature CLUSTER_DESIRED_BALANCE = new NodeFeature("cluster_desired_balance"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS = new NodeFeature("cluster_desired_balance_stats"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_EXTENDED = new NodeFeature("cluster_desired_balance_extended"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT = new NodeFeature( + "cluster_desired_balance_stats_undesired_count" + ); + + private static final NodeFeature CLUSTER_DESIRED_NODES_OLD = new NodeFeature("cluster_desired_nodes_old"); + private static final NodeFeature CLUSTER_DESIRED_NODES_DRY_RUN = new NodeFeature("cluster_desired_nodes_dry_run"); + private static final NodeFeature CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION = new NodeFeature( + "cluster_desired_nodes_no_settings_validation" + ); + private static final NodeFeature CLUSTER_DESIRED_NODES = new NodeFeature("cluster_desired_nodes"); + + private static final NodeFeature CLUSTER_HEALTH_INDICES_OPTIONS = new NodeFeature("cluster_health_indices_options"); + + private static final NodeFeature CLUSTER_INFO = new NodeFeature("cluster_info"); + private 
static final NodeFeature CLUSTER_INFO_EXTENDED = new NodeFeature("cluster_info_extended"); + + private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL = new NodeFeature("cluster_prevalidate_node_removal"); + private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON = new NodeFeature("cluster_prevalidate_node_removal_reason"); + + private static final NodeFeature CLUSTER_STATS_PACKAGING_TYPES = new NodeFeature("cluster_stats_packaging_types"); + private static final NodeFeature CLUSTER_STATS_RUNTIME_FIELDS = new NodeFeature("cluster_stats_runtime_fields"); + private static final NodeFeature CLUSTER_STATS_INDEXING_PRESSURE = new NodeFeature("cluster_stats_indexing_pressure"); + private static final NodeFeature CLUSTER_STATS_MAPPING_SIZES = new NodeFeature("cluster_stats_mapping_sizes"); + private static final NodeFeature CLUSTER_STATS_SNAPSHOTS = new NodeFeature("cluster_stats_snapshots"); + private static final NodeFeature CLUSTER_STATS_DENSE_VECTORS = new NodeFeature("cluster_stats_dense_vectors"); + + private static final NodeFeature DATASTREAM_LIFECYCLE = new NodeFeature("datastream_lifecycle"); + + private static final NodeFeature TEMPLATES_V2 = new NodeFeature("templates_v2"); + + private static final NodeFeature INDICES_REPLICATE_CLOSED = new NodeFeature("indices_replicate_closed"); + @Override public Map getHistoricalFeatures() { return Map.ofEntries( + Map.entry(BULK_AUTO_ID, Version.V_7_5_0), + Map.entry(BULK_REQUIRE_ALIAS, Version.V_7_10_0), + Map.entry(BULK_PIPELINE_VALIDATE, Version.V_7_9_1), + Map.entry(BULK_DYNAMIC_TEMPLATE_OP_TYPE, Version.V_8_6_1), + Map.entry(BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION, Version.V_8_8_0), + Map.entry(CAT_ALIASES_SHOW_WRITE_INDEX, Version.V_7_4_0), Map.entry(CAT_ALIASES_HIDDEN, Version.V_7_7_0), Map.entry(CAT_ALIASES_LOCAL_DEPRECATED, Version.V_8_12_0), Map.entry(CAT_ALLOCATION_NODE_ROLE, Version.V_8_10_0), - Map.entry(CAT_INDICES_REPLICATE_CLOSED, Version.V_7_2_0), Map.entry(CAT_INDICES_VALIDATE_HEALTH_PARAM, Version.V_7_8_0), Map.entry(CAT_INDICES_DATASET_SIZE, Version.V_8_11_0), @@ -64,8 +114,40 @@ public Map getHistoricalFeatures() { Map.entry(CAT_TASKS_X_OPAQUE_ID, Version.V_7_10_0), - Map.entry(CAT_TEMPLATES_V2, Version.V_7_8_0), - Map.entry(CAT_TEMPLATE_NAME_VALIDATION, Version.V_7_16_0) + Map.entry(CAT_TEMPLATE_NAME_VALIDATION, Version.V_7_16_0), + + Map.entry(CLUSTER_TEMPLATES_DELETE_MULTIPLE, Version.V_8_0_0), + Map.entry(CLUSTER_ALLOCATION_ROLE, Version.V_8_11_0), + + Map.entry(CLUSTER_DESIRED_BALANCE, Version.V_8_6_0), + Map.entry(CLUSTER_DESIRED_BALANCE_STATS, Version.V_8_7_0), + Map.entry(CLUSTER_DESIRED_BALANCE_EXTENDED, Version.V_8_8_0), + Map.entry(CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT, Version.V_8_12_0), + + Map.entry(CLUSTER_DESIRED_NODES_OLD, Version.V_8_3_0), + Map.entry(CLUSTER_DESIRED_NODES_DRY_RUN, Version.V_8_4_0), + Map.entry(CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION, Version.V_8_10_0), + + Map.entry(CLUSTER_HEALTH_INDICES_OPTIONS, Version.V_7_2_0), + + Map.entry(CLUSTER_INFO, Version.V_8_8_0), + Map.entry(CLUSTER_INFO_EXTENDED, Version.V_8_9_0), + + Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL, Version.V_8_6_0), + Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON, Version.V_8_7_0), + + Map.entry(CLUSTER_STATS_PACKAGING_TYPES, Version.V_7_2_0), + Map.entry(CLUSTER_STATS_RUNTIME_FIELDS, Version.V_7_13_0), + Map.entry(CLUSTER_STATS_INDEXING_PRESSURE, Version.V_8_1_0), + Map.entry(CLUSTER_STATS_MAPPING_SIZES, Version.V_8_4_0), + Map.entry(CLUSTER_STATS_SNAPSHOTS, Version.V_8_8_0), + 
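Illustrative sketch only, not part of this patch: the declaration pattern used throughout this class, shown with an invented feature. A FeatureSpecification pairs each NodeFeature with the Version that first carried the behaviour, and getHistoricalFeatures() exposes that mapping. The import packages below are assumed from the surrounding code.

import java.util.Map;

import org.elasticsearch.Version;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

// Hypothetical specification mirroring the entries in getHistoricalFeatures();
// the feature name and version here are made up.
public class ExampleLegacyFeatures implements FeatureSpecification {
    private static final NodeFeature EXAMPLE_FEATURE = new NodeFeature("example_feature");

    @Override
    public Map<NodeFeature, Version> getHistoricalFeatures() {
        return Map.of(EXAMPLE_FEATURE, Version.V_8_0_0);
    }
}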
Map.entry(CLUSTER_STATS_DENSE_VECTORS, Version.V_8_10_0), + + Map.entry(DATASTREAM_LIFECYCLE, Version.V_8_11_0), + + Map.entry(INDICES_REPLICATE_CLOSED, Version.V_7_2_0), + + Map.entry(TEMPLATES_V2, Version.V_7_8_0) ); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index e5f46ff135171..466b64736ddbc 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.NodeSelector; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.test.rest.yaml.ParameterizableYamlXContentParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; @@ -26,6 +27,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -38,7 +40,8 @@ * Supports a setup section and multiple test sections. */ public class ClientYamlTestSuite { - public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSectionRegistry, String api, Path file) throws IOException { + public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSectionRegistry, String api, Path file, Map params) + throws IOException { if (Files.isRegularFile(file) == false) { throw new IllegalArgumentException(file.toAbsolutePath() + " is not a file"); } @@ -63,10 +66,18 @@ public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSection } try ( - XContentParser parser = YamlXContent.yamlXContent.createParser( - XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), - Files.newInputStream(file) - ) + XContentParser parser = params.isEmpty() + ? YamlXContent.yamlXContent.createParser( + XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), + Files.newInputStream(file) + ) + : new ParameterizableYamlXContentParser( + YamlXContent.yamlXContent.createParser( + XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), + Files.newInputStream(file) + ), + params + ) ) { return parse(api, filename, Optional.of(file), parser); } catch (Exception e) { @@ -103,6 +114,10 @@ public static ClientYamlTestSuite parse(String api, String suiteName, Optional

(testSections)); } + public static ClientYamlTestSuite parse(NamedXContentRegistry xcontentRegistry, String api, Path filePath) throws IOException { + return parse(xcontentRegistry, api, filePath, Collections.emptyMap()); + } + private final String api; private final String name; private final Optional file; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 39d7808571870..3e321d57d877c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -19,7 +19,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalOrder; -import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.terms.AbstractInternalTerms; import org.elasticsearch.xcontent.XContentBuilder; @@ -37,7 +36,7 @@ public class InternalMultiTerms extends AbstractInternalTerms { + public static class Bucket extends AbstractInternalTerms.AbstractTermsBucket { long bucketOrd; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java index 76d8130f954de..be020f74eafff 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; @@ -358,18 +359,20 @@ public void testReduceWithDoublePromotion() { keyConverters2, null ); - AggregationReduceContext context = new AggregationReduceContext.ForPartial( + AggregationReduceContext context = new AggregationReduceContext.ForFinal( bigArrays, mockScriptService, () -> false, - mock(AggregationBuilder.class) + mock(AggregationBuilder.class), + i -> {}, + PipelineAggregator.PipelineTree.EMPTY ); InternalMultiTerms result = (InternalMultiTerms) InternalAggregationTestCase.reduce(List.of(terms1, terms2), context); assertThat(result.buckets, hasSize(3)); - assertThat(result.buckets.get(0).getKeyAsString(), equalTo("4|9.223372036854776E18|4.0")); + assertThat(result.buckets.get(0).getKeyAsString(), equalTo("4|9.223372036854776E18|1.0")); assertThat(result.buckets.get(0).getDocCount(), equalTo(3L)); - assertThat(result.buckets.get(1).getKeyAsString(), equalTo("4|9.223372036854776E18|1.0")); + assertThat(result.buckets.get(1).getKeyAsString(), equalTo("4|9.223372036854776E18|4.0")); assertThat(result.buckets.get(1).getDocCount(), equalTo(3L)); assertThat(result.buckets.get(2).getKeyAsString(), equalTo("3|9.223372036854776E18|3.0")); 
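An aside on the ParameterizableYamlXContentParser added earlier in this patch: the substitution it applies to string scalars is small enough to show as a self-contained, hypothetical sketch; the parameter name and value below are invented for illustration.

import java.util.Map;

// Standalone illustration of the rule in maybeReplaceParams(): each supplied parameter key is
// wrapped in '@' markers and replaced inside string values while the yaml suite is parsed.
public final class YamlParamSubstitutionSketch {
    public static void main(String[] args) {
        Map<String, Object> params = Map.of("batch_size", 100);
        String raw = "index @batch_size@ documents";
        String resolved = raw;
        for (Map.Entry<String, Object> entry : params.entrySet()) {
            resolved = resolved.replace("@" + entry.getKey() + "@", entry.getValue().toString());
        }
        System.out.println(resolved); // prints: index 100 documents
    }
}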
assertThat(result.buckets.get(2).getDocCount(), equalTo(2L)); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 934aeef26843f..0d51a4271e85b 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -561,12 +561,8 @@ public void maybeFetchRegion( listener.onResponse(false); return; } - long regionLength = regionSize; try { - if (region == getEndingRegion(blobLength)) { - regionLength = blobLength - getRegionStart(region); - } - ByteRange regionRange = ByteRange.of(0, regionLength); + ByteRange regionRange = ByteRange.of(0, computeCacheFileRegionSize(blobLength, region)); if (regionRange.isEmpty()) { listener.onResponse(false); return; diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index eae3031512d4f..72e63b3255999 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -109,6 +109,8 @@ tasks.named("yamlRestTestV7CompatTransform").configure { task -> ) task.skipTest("ml/jobs_crud/Test update job", "Behaviour change #89824 - added limit filter to categorization analyzer") task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") + task.skipTest("ml/jobs_crud/Test cannot create job with model snapshot id set", "Exception type has changed.") + task.skipTest("ml/validate/Test job config is invalid because model snapshot id set", "Exception type has changed.") task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") task.skipTest( "ml/datafeeds_crud/Test update datafeed to point to different job", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index c77d2613cd321..a0a2bbc3bed19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -16,6 +16,8 @@ public abstract class EsqlQueryRequestBuilder extends ActionRequestBuilder { + private final ActionType action; + /** Creates a new ES|QL query request builder. 
*/ public static EsqlQueryRequestBuilder newRequestBuilder( ElasticsearchClient client @@ -26,6 +28,11 @@ public abstract class EsqlQueryRequestBuilder action, Request request) { super(client, action, request); + this.action = action; + } + + public final ActionType action() { + return action; } public abstract EsqlQueryRequestBuilder query(String query); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java index 749304caf6e20..97c7d6d8cb60d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java @@ -12,18 +12,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; /** * The response object returned by the Explain Lifecycle API. @@ -37,26 +32,6 @@ public class ExplainLifecycleResponse extends ActionResponse implements ToXConte private Map indexResponses; - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "explain_lifecycle_response", - a -> new ExplainLifecycleResponse( - ((List) a[0]).stream() - .collect(Collectors.toMap(IndexLifecycleExplainResponse::getIndex, Function.identity())) - ) - ); - static { - PARSER.declareNamedObjects( - ConstructingObjectParser.constructorArg(), - (p, c, n) -> IndexLifecycleExplainResponse.PARSER.apply(p, c), - INDICES_FIELD - ); - } - - public static ExplainLifecycleResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - public ExplainLifecycleResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java new file mode 100644 index 0000000000000..50ca46d85190f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Writes a chat completion result in the following json format: + * { + * "completion": [ + * { + * "result": "some result 1" + * }, + * { + * "result": "some result 2" + * } + * ] + * } + * + */ +public record ChatCompletionResults(List results) implements InferenceServiceResults { + + public static final String NAME = "chat_completion_service_results"; + public static final String COMPLETION = TaskType.COMPLETION.name().toLowerCase(Locale.ROOT); + + public ChatCompletionResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(Result::new)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(COMPLETION); + for (Result result : results) { + result.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(results); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException(); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException(); + } + + public List getResults() { + return results; + } + + @Override + public Map asMap() { + Map map = new LinkedHashMap<>(); + map.put(COMPLETION, results.stream().map(Result::asMap).collect(Collectors.toList())); + + return map; + } + + public record Result(String content) implements Writeable, ToXContentObject { + + public static final String RESULT = "result"; + + public Result(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(content); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(RESULT, content); + builder.endObject(); + + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public Map asMap() { + return Map.of(RESULT, content); + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java new file mode 100644 index 0000000000000..eef864f2e8992 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class ErrorChunkedInferenceResults implements ChunkedInferenceServiceResults { + + public static final String NAME = "error_chunked"; + + private final Exception exception; + + public ErrorChunkedInferenceResults(Exception exception) { + this.exception = Objects.requireNonNull(exception); + } + + public ErrorChunkedInferenceResults(StreamInput in) throws IOException { + this.exception = in.readException(); + } + + public Exception getException() { + return exception; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeException(exception); + } + + @Override + public boolean equals(Object object) { + if (object == this) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + ErrorChunkedInferenceResults that = (ErrorChunkedInferenceResults) object; + // Just compare the message for serialization test purposes + return Objects.equals(exception.getMessage(), that.exception.getMessage()); + } + + @Override + public int hashCode() { + // Just compare the message for serialization test purposes + return Objects.hash(exception.getMessage()); + } + + @Override + public List transformToCoordinationFormat() { + return null; + } + + @Override + public List transformToLegacyFormat() { + return null; + } + + @Override + public Map asMap() { + Map asMap = new LinkedHashMap<>(); + asMap.put(NAME, exception.getMessage()); + return asMap; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(NAME, exception.getMessage()); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index 8d4e9d25b94a3..d03a6d5c0c7c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -60,7 +60,7 @@ public static class Request extends ActionRequest implements ToXContentObject { private static final ObjectParser PARSER = new ObjectParser<>("preview_datafeed_action", Request.Builder::new); static { PARSER.declareObject(Builder::setDatafeedBuilder, DatafeedConfig.STRICT_PARSER, DATAFEED_CONFIG); - PARSER.declareObject(Builder::setJobBuilder, Job.STRICT_PARSER, JOB_CONFIG); + PARSER.declareObject(Builder::setJobBuilder, Job.REST_REQUEST_PARSER, JOB_CONFIG); PARSER.declareString(Builder::setStart, START_TIME); PARSER.declareString(Builder::setEnd, END_TIME); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 400bdaa3a27ea..efb4dacd83ba4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.ml.job.messages.Messages; import java.io.IOException; -import java.util.List; import java.util.Objects; public class PutJobAction extends ActionType { @@ -35,7 +34,7 @@ private PutJobAction() { public static class Request extends AcknowledgedRequest { public static Request parseRequest(String jobId, XContentParser parser, IndicesOptions indicesOptions) { - Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.REST_REQUEST_PARSER.apply(parser, null); if (jobBuilder.getId() == null) { jobBuilder.setId(jobId); } else if (Strings.isNullOrEmpty(jobId) == false && jobId.equals(jobBuilder.getId()) == false) { @@ -58,14 +57,6 @@ public Request(Job.Builder jobBuilder) { // would occur when parsing an old job config that already had duplicate detectors. jobBuilder.validateDetectorsAreUnique(); - // Some fields cannot be set at create time - List invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings(); - if (invalidJobCreationSettings.isEmpty() == false) { - throw new IllegalArgumentException( - Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS, String.join(",", invalidJobCreationSettings)) - ); - } - this.jobBuilder = jobBuilder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java index 48549ae100e36..76cba60667c32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java @@ -14,11 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.messages.Messages; import java.io.IOException; import java.util.Date; -import java.util.List; import java.util.Objects; public class ValidateJobConfigAction extends ActionType { @@ -32,10 +30,10 @@ protected ValidateJobConfigAction() { public static class Request extends ActionRequest { - private Job job; + private final Job job; public static Request parseRequest(XContentParser parser) { - Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.REST_REQUEST_PARSER.apply(parser, null); // When jobs are PUT their ID must be supplied in the URL - assume this will // be valid unless an invalid job ID is specified in the JSON to be validated jobBuilder.setId(jobBuilder.getId() != null ? jobBuilder.getId() : "ok"); @@ -45,14 +43,6 @@ public static Request parseRequest(XContentParser parser) { // would occur when parsing an old job config that already had duplicate detectors. 
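[Editorial sketch, not part of the patch] The effect of swapping Job.STRICT_PARSER for Job.REST_REQUEST_PARSER in these request classes is that internal fields such as create_time are rejected as unknown fields while the body is parsed, which is why the separate invalidCreateTimeSettings() post-check is removed. The class name and JSON payload below are invented for illustration; the parser calls mirror the test changes later in this patch.

import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParseException;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.ml.job.config.Job;

final class RestRequestParserSketch {
    // Parsing a job body that tries to set the internal create_time field now fails
    // inside the parser itself rather than in a later create-time validation step.
    static void rejectInternalField() throws Exception {
        String body = """
            { "analysis_config": { "detectors": [{ "function": "count" }] },
              "data_description": {},
              "create_time": 1514764800000 }""";
        try (XContentParser parser = XContentFactory.xContent(XContentType.JSON)
                .createParser(XContentParserConfiguration.EMPTY, body)) {
            Job.REST_REQUEST_PARSER.apply(parser, null);
        } catch (XContentParseException expected) {
            // "unknown field [create_time]" - only LENIENT_PARSER still accepts internal fields
        }
    }
}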
jobBuilder.validateDetectorsAreUnique(); - // Some fields cannot be set at create time - List invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings(); - if (invalidJobCreationSettings.isEmpty() == false) { - throw new IllegalArgumentException( - Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS, String.join(",", invalidJobCreationSettings)) - ); - } - return new Request(jobBuilder.build(new Date())); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index fbb1a137bdc13..8da0209e10293 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -98,8 +98,9 @@ public class Job implements SimpleDiffable, Writeable, ToXContentObject { public static final ParseField RESULTS_FIELD = new ParseField("jobs"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser LENIENT_PARSER = createParser(true); - public static final ObjectParser STRICT_PARSER = createParser(false); + public static final ObjectParser LENIENT_PARSER = createParser(true, true); + // Use the REST request parser to parse a job passed to the API, to disallow setting internal fields. + public static final ObjectParser REST_REQUEST_PARSER = createParser(false, false); public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1); @@ -114,26 +115,12 @@ public class Job implements SimpleDiffable, Writeable, ToXContentObject { public static final long DEFAULT_MODEL_SNAPSHOT_RETENTION_DAYS = 10; public static final long DEFAULT_DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = 1; - private static ObjectParser createParser(boolean ignoreUnknownFields) { + private static ObjectParser createParser(boolean allowInternalFields, boolean ignoreUnknownFields) { ObjectParser parser = new ObjectParser<>("job_details", ignoreUnknownFields, Builder::new); parser.declareString(Builder::setId, ID); - parser.declareString(Builder::setJobType, JOB_TYPE); - parser.declareString(Builder::setJobVersion, JOB_VERSION); parser.declareStringArray(Builder::setGroups, GROUPS); parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); - parser.declareField( - Builder::setCreateTime, - p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - parser.declareField( - Builder::setFinishedTime, - p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()), - FINISHED_TIME, - ValueType.VALUE - ); parser.declareObject( Builder::setAnalysisConfig, ignoreUnknownFields ? 
AnalysisConfig.LENIENT_PARSER : AnalysisConfig.STRICT_PARSER, @@ -165,17 +152,35 @@ private static ObjectParser createParser(boolean ignoreUnknownFie parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); parser.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); parser.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT); - parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - parser.declareBoolean(Builder::setDeleting, DELETING); parser.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN); - parser.declareObject(Builder::setBlocked, ignoreUnknownFields ? Blocked.LENIENT_PARSER : Blocked.STRICT_PARSER, BLOCKED); parser.declareObject( Builder::setDatafeed, ignoreUnknownFields ? DatafeedConfig.LENIENT_PARSER : DatafeedConfig.STRICT_PARSER, DATAFEED_CONFIG ); + + if (allowInternalFields) { + parser.declareString(Builder::setJobType, JOB_TYPE); + parser.declareString(Builder::setJobVersion, JOB_VERSION); + parser.declareField( + Builder::setCreateTime, + p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), + CREATE_TIME, + ValueType.VALUE + ); + parser.declareField( + Builder::setFinishedTime, + p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()), + FINISHED_TIME, + ValueType.VALUE + ); + parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); + parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); + parser.declareBoolean(Builder::setDeleting, DELETING); + parser.declareObject(Builder::setBlocked, ignoreUnknownFields ? Blocked.LENIENT_PARSER : Blocked.STRICT_PARSER, BLOCKED); + } + return parser; } @@ -1020,26 +1025,6 @@ public Builder setDatafeedIndicesOptionsIfRequired(IndicesOptions indicesOptions return this; } - /** - * Return the list of fields that have been set and are invalid to - * be set when the job is created e.g. model snapshot Id should not - * be set at job creation. - * @return List of fields set fields that should not be. - */ - public List invalidCreateTimeSettings() { - List invalidCreateValues = new ArrayList<>(); - if (modelSnapshotId != null) { - invalidCreateValues.add(MODEL_SNAPSHOT_ID.getPreferredName()); - } - if (finishedTime != null) { - invalidCreateValues.add(FINISHED_TIME.getPreferredName()); - } - if (createTime != null) { - invalidCreateValues.add(CREATE_TIME.getPreferredName()); - } - return invalidCreateValues; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index ad7a6b998fafd..52c97ece1b017 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -222,8 +222,6 @@ public final class Messages { public static final String JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD = "over_field_name must be set when the ''{0}'' function is used"; public static final String JOB_CONFIG_ID_ALREADY_TAKEN = "The job cannot be created with the Id ''{0}''. 
The Id is already used."; public static final String JOB_CONFIG_ID_TOO_LONG = "The job id cannot contain more than {0,number,integer} characters."; - public static final String JOB_CONFIG_INVALID_CREATE_SETTINGS = - "The job is configured with fields [{0}] that are illegal to set at job creation"; public static final String JOB_CONFIG_INVALID_FIELDNAME_CHARS = "Invalid field name ''{0}''. Field names including over, by and partition " + "fields cannot contain any of these characters: {1}"; public static final String JOB_CONFIG_INVALID_FIELDNAME = diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java index 6e484d5b04426..3cdee1faa3f11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java @@ -29,25 +29,25 @@ */ public final class GetApiKeyResponse extends ActionResponse implements ToXContentObject { - private final ApiKey[] foundApiKeysInfo; + public static final GetApiKeyResponse EMPTY = new GetApiKeyResponse(List.of()); - public GetApiKeyResponse(Collection foundApiKeysInfo) { - Objects.requireNonNull(foundApiKeysInfo, "found_api_keys_info must be provided"); - this.foundApiKeysInfo = foundApiKeysInfo.toArray(new ApiKey[0]); - } + private final List foundApiKeyInfoList; - public static GetApiKeyResponse emptyResponse() { - return new GetApiKeyResponse(List.of()); + public GetApiKeyResponse(Collection foundApiKeyInfos) { + Objects.requireNonNull(foundApiKeyInfos, "found_api_keys_info must be provided"); + this.foundApiKeyInfoList = foundApiKeyInfos.stream().map(Item::new).toList(); } - public ApiKey[] getApiKeyInfos() { - return foundApiKeysInfo; + public List getApiKeyInfoList() { + return foundApiKeyInfoList; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().array("api_keys", (Object[]) foundApiKeysInfo); - return builder.endObject(); + builder.startObject(); + builder.field("api_keys", foundApiKeyInfoList); + builder.endObject(); + return builder; } @Override @@ -55,21 +55,49 @@ public void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); } - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("get_api_key_response", args -> { - return (args[0] == null) ? 
GetApiKeyResponse.emptyResponse() : new GetApiKeyResponse((List) args[0]); - }); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ApiKey.fromXContent(p), new ParseField("api_keys")); + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetApiKeyResponse that = (GetApiKeyResponse) o; + return Objects.equals(foundApiKeyInfoList, that.foundApiKeyInfoList); } - public static GetApiKeyResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); + @Override + public int hashCode() { + return Objects.hash(foundApiKeyInfoList); } @Override public String toString() { - return "GetApiKeyResponse [foundApiKeysInfo=" + foundApiKeysInfo + "]"; + return "GetApiKeyResponse{foundApiKeysInfo=" + foundApiKeyInfoList + "}"; } + public record Item(ApiKey apiKeyInfo) implements ToXContentObject { + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + apiKeyInfo.innerToXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "Item{apiKeyInfo=" + apiKeyInfo + "}"; + } + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_api_key_response", + args -> (args[0] == null) ? GetApiKeyResponse.EMPTY : new GetApiKeyResponse((List) args[0]) + ); + static { + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ApiKey.fromXContent(p), new ParseField("api_keys")); + } + + public static GetApiKeyResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java index 9e510d48654b2..11b9163026322 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -27,31 +28,25 @@ */ public final class QueryApiKeyResponse extends ActionResponse implements ToXContentObject { + public static final QueryApiKeyResponse EMPTY = new QueryApiKeyResponse(0, List.of(), null); + private final long total; - private final Item[] items; + private final List foundApiKeyInfoList; private final @Nullable InternalAggregations aggregations; public QueryApiKeyResponse(long total, Collection items, @Nullable InternalAggregations aggregations) { this.total = total; Objects.requireNonNull(items, "items must be provided"); - this.items = items.toArray(new Item[0]); + this.foundApiKeyInfoList = items instanceof List ? 
(List) items : new ArrayList<>(items); this.aggregations = aggregations; } - public static QueryApiKeyResponse emptyResponse() { - return new QueryApiKeyResponse(0, List.of(), null); - } - public long getTotal() { return total; } - public Item[] getItems() { - return items; - } - - public int getCount() { - return items.length; + public List getApiKeyInfoList() { + return foundApiKeyInfoList; } public InternalAggregations getAggregations() { @@ -60,11 +55,13 @@ public InternalAggregations getAggregations() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().field("total", total).field("count", items.length).array("api_keys", (Object[]) items); + builder.startObject(); + builder.field("total", total).field("count", foundApiKeyInfoList.size()).field("api_keys", foundApiKeyInfoList); if (aggregations != null) { aggregations.toXContent(builder, params); } - return builder.endObject(); + builder.endObject(); + return builder; } @Override @@ -77,44 +74,30 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; QueryApiKeyResponse that = (QueryApiKeyResponse) o; - return total == that.total && Arrays.equals(items, that.items) && Objects.equals(aggregations, that.aggregations); + return total == that.total + && Objects.equals(foundApiKeyInfoList, that.foundApiKeyInfoList) + && Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { int result = Objects.hash(total); - result = 31 * result + Arrays.hashCode(items); + result = 31 * result + Objects.hash(foundApiKeyInfoList); result = 31 * result + Objects.hash(aggregations); return result; } @Override public String toString() { - return "QueryApiKeyResponse{total=" + total + ", items=" + Arrays.toString(items) + ", aggs=" + aggregations + "}"; + return "QueryApiKeyResponse{total=" + total + ", items=" + foundApiKeyInfoList + ", aggs=" + aggregations + "}"; } - public static class Item implements ToXContentObject { - private final ApiKey apiKey; - @Nullable - private final Object[] sortValues; - - public Item(ApiKey apiKey, @Nullable Object[] sortValues) { - this.apiKey = apiKey; - this.sortValues = sortValues; - } - - public ApiKey getApiKey() { - return apiKey; - } - - public Object[] getSortValues() { - return sortValues; - } + public record Item(ApiKey apiKeyInfo, @Nullable Object[] sortValues) implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - apiKey.innerToXContent(builder, params); + apiKeyInfo.innerToXContent(builder, params); if (sortValues != null && sortValues.length > 0) { builder.array("_sort", sortValues); } @@ -122,24 +105,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Item item = (Item) o; - return Objects.equals(apiKey, item.apiKey) && Arrays.equals(sortValues, item.sortValues); - } - - @Override - public int hashCode() { - int result = Objects.hash(apiKey); - result = 31 * result + Arrays.hashCode(sortValues); - return result; - } - @Override public String toString() { - return "Item{" + "apiKey=" + apiKey + ", sortValues=" + Arrays.toString(sortValues) + '}'; + return "Item{apiKeyInfo=" + apiKeyInfo + ", sortValues=" + Arrays.toString(sortValues) + '}'; } } } 
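[Editorial sketch, not part of the patch] Caller-side view of the List-based accessors that replace the ApiKey[]/Item[] arrays in GetApiKeyResponse and QueryApiKeyResponse; Item is now a record, so its components are read via apiKeyInfo() and sortValues(). The helper class and method name are invented for illustration.

import org.elasticsearch.xpack.core.security.action.apikey.ApiKey;
import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyResponse;

import java.util.List;

final class ApiKeyResponseSketch {
    // Collects the ids of the returned keys; previously callers indexed into getItems().
    static List<String> apiKeyIds(QueryApiKeyResponse response) {
        return response.getApiKeyInfoList().stream()
            .map(QueryApiKeyResponse.Item::apiKeyInfo)
            .map(ApiKey::getId)
            .toList();
    }
}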
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index d71690f3dc8e7..6a06be1b63b77 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -28,7 +28,9 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.profile.Profile; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.RoleDescriptorsBytes; +import org.elasticsearch.xpack.core.security.authc.RealmConfig.RealmIdentifier; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.service.ServiceAccountSettings; @@ -1008,6 +1010,11 @@ public String toString() { return builder.toString(); } + /** + * {@link RealmRef} expresses the grouping of realms, identified with {@link RealmIdentifier}s, under {@link RealmDomain}s. + * A domain groups different realms, such that any username, authenticated by different realms from the same domain, + * is to be associated to a single {@link Profile}. + */ public static class RealmRef implements Writeable, ToXContentObject { private final String nodeName; @@ -1082,6 +1089,13 @@ public String getType() { return domain; } + /** + * The {@code RealmIdentifier} is the fully qualified way to refer to a realm. + */ + public RealmIdentifier getIdentifier() { + return new RealmIdentifier(type, name); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java index a6a3d926db689..63989ee86b3a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java @@ -14,7 +14,6 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; -import org.elasticsearch.xpack.core.security.authc.RealmConfig.RealmIdentifier; import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; import org.elasticsearch.xpack.core.security.user.User; @@ -22,7 +21,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; /** * An authentication mechanism to which the default authentication org.elasticsearch.xpack.security.authc.AuthenticationService @@ -145,8 +143,11 @@ public void usageStats(ActionListener> listener) { listener.onResponse(stats); } - public void initRealmRef(Map realmRefs) { - final RealmRef realmRef = Objects.requireNonNull(realmRefs.get(new RealmIdentifier(type(), name())), "realmRef must not be null"); + /** + * Must be called only once by the realms initialization logic, soon after this {@code Realm} is constructed, + * in order to link in the realm domain details, which may refer to any of the other realms. 
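[Editorial sketch, not part of the patch] One plausible shape for the realm-initialization wiring implied by the new setRealmRef(RealmRef) method and RealmRef.getIdentifier(): the (type, name) RealmIdentifier keys a pre-computed map of RealmRefs (which carry the domain, if any), and each realm is linked exactly once. The surrounding wiring code and class name are assumptions, though the lookup mirrors the removed initRealmRef() body.

import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef;
import org.elasticsearch.xpack.core.security.authc.Realm;
import org.elasticsearch.xpack.core.security.authc.RealmConfig.RealmIdentifier;

import java.util.Map;

final class RealmRefWiringSketch {
    // Hypothetical initialization step: resolve each realm's RealmRef by identifier
    // and attach it once, soon after the realm is constructed.
    static void wire(Iterable<Realm> realms, Map<RealmIdentifier, RealmRef> realmRefs) {
        for (Realm realm : realms) {
            RealmRef ref = realmRefs.get(new RealmIdentifier(realm.type(), realm.name()));
            realm.setRealmRef(ref);
        }
    }
}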
+ */ + public void setRealmRef(RealmRef realmRef) { this.realmRef.set(realmRef); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index e9aa982a05d8b..ecbd12a7f4643 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -1623,7 +1623,7 @@ public String toString() { return sb.toString(); } - static Restriction parse(String roleName, XContentParser parser) throws IOException { + public static Restriction parse(String roleName, XContentParser parser) throws IOException { // advance to the START_OBJECT token if needed XContentParser.Token token = parser.currentToken() == null ? parser.nextToken() : parser.currentToken(); if (token != XContentParser.Token.START_OBJECT) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java new file mode 100644 index 0000000000000..90ee353b46eaa --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.Transition; +import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.plugins.FieldPredicate; + +import java.io.IOException; +import java.security.DigestOutputStream; +import java.security.MessageDigest; +import java.util.Base64; + +/** + * An implementation of {@link FieldPredicate} which matches fields + * against an {@link Automaton}. 
+ */ +class AutomatonFieldPredicate implements FieldPredicate { + private final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(AutomatonFieldPredicate.class); + + private final String automatonHash; + private final CharacterRunAutomaton automaton; + + AutomatonFieldPredicate(Automaton originalAutomaton, CharacterRunAutomaton automaton) { + this.automatonHash = sha256(originalAutomaton); + this.automaton = automaton; + } + + @Override + public boolean test(String field) { + return automaton.run(field); + } + + @Override + public String modifyHash(String hash) { + return hash + ":" + automatonHash; + } + + @Override + public long ramBytesUsed() { + return SHALLOW_SIZE + RamUsageEstimator.sizeOf(automatonHash); // automaton itself is a shallow copy so not counted here + } + + private static String sha256(Automaton automaton) { + MessageDigest messageDigest = MessageDigests.sha256(); + try { + StreamOutput out = new OutputStreamStreamOutput(new DigestOutputStream(Streams.NULL_OUTPUT_STREAM, messageDigest)); + Transition t = new Transition(); + for (int state = 0; state < automaton.getNumStates(); state++) { + out.writeInt(state); + out.writeBoolean(automaton.isAccept(state)); + + int numTransitions = automaton.initTransition(state, t); + for (int i = 0; i < numTransitions; ++i) { + automaton.getNextTransition(t); + out.writeInt(t.dest); + out.writeInt(t.min); + out.writeInt(t.max); + } + } + } catch (IOException bogus) { + // cannot happen + throw new Error(bogus); + } + return Base64.getEncoder().encodeToString(messageDigest.digest()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 8f2088f55ade6..f3c2d9f62e40f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.xpack.core.security.authz.accesscontrol.FieldSubsetReader; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup; import org.elasticsearch.xpack.core.security.authz.support.SecurityQueryTemplateEvaluator.DlsQueryEvaluationContext; @@ -67,6 +68,7 @@ public final class FieldPermissions implements Accountable, CacheKey { private final CharacterRunAutomaton permittedFieldsAutomaton; private final boolean permittedFieldsAutomatonIsTotal; private final Automaton originalAutomaton; + private final FieldPredicate fieldPredicate; private final long ramBytesUsed; @@ -106,6 +108,9 @@ private FieldPermissions(List fieldPermissionsDefini this.permittedFieldsAutomaton = new CharacterRunAutomaton(permittedFieldsAutomaton); // we cache the result of isTotal since this might be a costly operation this.permittedFieldsAutomatonIsTotal = Operations.isTotal(permittedFieldsAutomaton); + this.fieldPredicate = permittedFieldsAutomatonIsTotal + ? 
FieldPredicate.ACCEPT_ALL + : new AutomatonFieldPredicate(originalAutomaton, this.permittedFieldsAutomaton); long ramBytesUsed = BASE_FIELD_PERM_DEF_BYTES; ramBytesUsed += this.fieldPermissionsDefinitions.stream() @@ -113,6 +118,7 @@ private FieldPermissions(List fieldPermissionsDefini .sum(); ramBytesUsed += permittedFieldsAutomaton.ramBytesUsed(); ramBytesUsed += runAutomatonRamBytesUsed(permittedFieldsAutomaton); + ramBytesUsed += fieldPredicate.ramBytesUsed(); this.ramBytesUsed = ramBytesUsed; } @@ -220,6 +226,10 @@ public boolean grantsAccessTo(String fieldName) { return permittedFieldsAutomatonIsTotal || permittedFieldsAutomaton.run(fieldName); } + public FieldPredicate fieldPredicate() { + return fieldPredicate; + } + public List getFieldPermissionsDefinitions() { return fieldPermissionsDefinitions; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index c514fb07cc32b..501bd307b74ca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -97,6 +97,10 @@ public class ClusterPrivilegeResolver { GetComponentTemplateAction.NAME, GetComposableIndexTemplateAction.NAME ); + private static final Set MONITOR_INFERENCE_PATTERN = Set.of( + "cluster:monitor/xpack/inference*", + "cluster:monitor/xpack/ml/trained_models/deployment/infer" + ); private static final Set MONITOR_ML_PATTERN = Set.of("cluster:monitor/xpack/ml/*"); private static final Set MONITOR_TEXT_STRUCTURE_PATTERN = Set.of("cluster:monitor/text_structure/*"); private static final Set MONITOR_TRANSFORM_PATTERN = Set.of("cluster:monitor/data_frame/*", "cluster:monitor/transform/*"); @@ -110,6 +114,13 @@ public class ClusterPrivilegeResolver { "indices:admin/index_template/*" ); private static final Predicate ACTION_MATCHER = Automatons.predicate(ALL_CLUSTER_PATTERN); + private static final Set MANAGE_INFERENCE_PATTERN = Set.of( + "cluster:admin/xpack/inference/*", + "cluster:monitor/xpack/inference*", // no trailing slash to match the POST InferenceAction name + "cluster:admin/xpack/ml/trained_models/deployment/start", + "cluster:admin/xpack/ml/trained_models/deployment/stop", + "cluster:monitor/xpack/ml/trained_models/deployment/infer" + ); private static final Set MANAGE_ML_PATTERN = Set.of("cluster:admin/xpack/ml/*", "cluster:monitor/xpack/ml/*"); private static final Set MANAGE_TRANSFORM_PATTERN = Set.of( "cluster:admin/data_frame/*", @@ -182,6 +193,10 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege NONE = new ActionClusterPrivilege("none", Set.of(), Set.of()); public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN); public static final NamedClusterPrivilege MONITOR = new ActionClusterPrivilege("monitor", MONITOR_PATTERN); + public static final NamedClusterPrivilege MONITOR_INFERENCE = new ActionClusterPrivilege( + "monitor_inference", + MONITOR_INFERENCE_PATTERN + ); public static final NamedClusterPrivilege MONITOR_ML = new ActionClusterPrivilege("monitor_ml", MONITOR_ML_PATTERN); public static final NamedClusterPrivilege MONITOR_TRANSFORM_DEPRECATED = new ActionClusterPrivilege( "monitor_data_frame_transforms", @@ -199,6 +214,7 @@ public class 
ClusterPrivilegeResolver { public static final NamedClusterPrivilege MONITOR_ROLLUP = new ActionClusterPrivilege("monitor_rollup", MONITOR_ROLLUP_PATTERN); public static final NamedClusterPrivilege MONITOR_ENRICH = new ActionClusterPrivilege("monitor_enrich", MONITOR_ENRICH_PATTERN); public static final NamedClusterPrivilege MANAGE = new ActionClusterPrivilege("manage", ALL_CLUSTER_PATTERN, ALL_SECURITY_PATTERN); + public static final NamedClusterPrivilege MANAGE_INFERENCE = new ActionClusterPrivilege("manage_inference", MANAGE_INFERENCE_PATTERN); public static final NamedClusterPrivilege MANAGE_ML = new ActionClusterPrivilege("manage_ml", MANAGE_ML_PATTERN); public static final NamedClusterPrivilege MANAGE_TRANSFORM_DEPRECATED = new ActionClusterPrivilege( "manage_data_frame_transforms", @@ -348,6 +364,7 @@ public class ClusterPrivilegeResolver { NONE, ALL, MONITOR, + MONITOR_INFERENCE, MONITOR_ML, MONITOR_TEXT_STRUCTURE, MONITOR_TRANSFORM_DEPRECATED, @@ -356,6 +373,7 @@ public class ClusterPrivilegeResolver { MONITOR_ROLLUP, MONITOR_ENRICH, MANAGE, + MANAGE_INFERENCE, MANAGE_ML, MANAGE_TRANSFORM_DEPRECATED, MANAGE_TRANSFORM, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 2bfcf9a12366e..dc5b8bfcce262 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -373,6 +373,32 @@ private static Map initializeReservedRoles() { null ) ), + entry( + "inference_admin", + new RoleDescriptor( + "inference_admin", + new String[] { "manage_inference" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null + ) + ), + entry( + "inference_user", + new RoleDescriptor( + "inference_user", + new String[] { "monitor_inference" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null + ) + ), entry( "machine_learning_user", new RoleDescriptor( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java index 43dc92857551a..d89732cb3b177 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java @@ -7,22 +7,15 @@ package org.elasticsearch.xpack.core.termsenum.action; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * The response of the _terms_enum action. 
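[Editorial sketch, not part of the patch] For the new inference privileges added above: a custom role built on monitor_inference would be equivalent in spirit to the reserved inference_user role (read-only inference access plus running inference, no create/delete and no deployment start/stop), while manage_inference mirrors inference_admin. The class and method names below are invented; the RoleDescriptor constructor used takes the role name, cluster privileges, index privileges and run-as users.

import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;

final class InferenceRoleSketch {
    // A user-defined analogue of the reserved "inference_user" role:
    // cluster-level monitor_inference only, no index privileges, no run-as.
    static RoleDescriptor inferenceReader(String roleName) {
        return new RoleDescriptor(roleName, new String[] { "monitor_inference" }, null, null);
    }
}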
*/ @@ -31,28 +24,6 @@ public class TermsEnumResponse extends BroadcastResponse { public static final String TERMS_FIELD = "terms"; public static final String COMPLETE_FIELD = "complete"; - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "term_enum_results", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new TermsEnumResponse( - (List) arg[1], - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()), - (Boolean) arg[2] - ); - } - ); - static { - declareBroadcastFields(PARSER); - PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TERMS_FIELD)); - PARSER.declareBoolean(optionalConstructorArg(), new ParseField(COMPLETE_FIELD)); - } - private final List terms; private boolean complete; @@ -106,7 +77,4 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.field(COMPLETE_FIELD, complete); } - public static TermsEnumResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java index 635ce03879089..6f1fdb8a20cae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java @@ -51,6 +51,9 @@ public class TransformMessages { "Failed to parse transform statistics for transform [{0}]"; public static final String FAILED_TO_LOAD_TRANSFORM_CHECKPOINT = "Failed to load transform checkpoint for transform [{0}]"; public static final String FAILED_TO_LOAD_TRANSFORM_STATE = "Failed to load transform state for transform [{0}]"; + + public static final String TRANSFORM_CANNOT_START_WITHOUT_PERMISSIONS = "Cannot start transform [{0}] because user lacks required " + + "permissions, see privileges_check_failed issue for more details"; public static final String TRANSFORM_CONFIGURATION_BAD_FUNCTION_COUNT = "Transform configuration must specify exactly 1 function"; public static final String TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY = "Pivot transform configuration must specify at least 1 group_by"; public static final String TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION = diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index 67c5e22902cf2..f06ba16d9da78 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -42,7 +41,6 @@ import java.util.Objects; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public 
class PreviewTransformAction extends ActionType { @@ -154,26 +152,6 @@ public static class Response extends ActionResponse implements ToXContentObject private final List> docs; private final TransformDestIndexSettings generatedDestIndexSettings; - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_transform_preview", - true, - args -> { - @SuppressWarnings("unchecked") - List> docs = (List>) args[0]; - TransformDestIndexSettings generatedDestIndex = (TransformDestIndexSettings) args[1]; - - return new Response(docs, generatedDestIndex); - } - ); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PREVIEW); - PARSER.declareObject( - optionalConstructorArg(), - (p, c) -> TransformDestIndexSettings.fromXContent(p), - GENERATED_DEST_INDEX_SETTINGS - ); - } - public Response(List> docs, TransformDestIndexSettings generatedDestIndexSettings) { this.docs = docs; this.generatedDestIndexSettings = generatedDestIndexSettings; @@ -237,9 +215,5 @@ public int hashCode() { public String toString() { return Strings.toString(this, true, true); } - - public static Response fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index bd267d19398b0..918976c0d3db8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -67,6 +67,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; @@ -454,8 +455,8 @@ public void onIndexModule(IndexModule indexModule) { } @Override - public Function> getFieldFilter() { - List>> items = filterPlugins(MapperPlugin.class).stream() + public Function getFieldFilter() { + List> items = filterPlugins(MapperPlugin.class).stream() .map(p -> p.getFieldFilter()) .filter(p -> p.equals(NOOP_FIELD_FILTER) == false) .toList(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java index ab084e66c3ad1..937502281b64d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -19,10 +20,29 @@ import java.io.IOException; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Function; +import 
java.util.stream.Collectors; public class ExplainLifecycleResponseTests extends AbstractXContentSerializingTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "explain_lifecycle_response", + a -> new ExplainLifecycleResponse( + ((List) a[0]).stream() + .collect(Collectors.toMap(IndexLifecycleExplainResponse::getIndex, Function.identity())) + ) + ); + static { + PARSER.declareNamedObjects( + ConstructingObjectParser.constructorArg(), + (p, c, n) -> IndexLifecycleExplainResponse.PARSER.apply(p, c), + ExplainLifecycleResponse.INDICES_FIELD + ); + } + @Override protected ExplainLifecycleResponse createTestInstance() { Map indexResponses = new HashMap<>(); @@ -51,7 +71,7 @@ protected ExplainLifecycleResponse mutateInstance(ExplainLifecycleResponse respo @Override protected ExplainLifecycleResponse doParseInstance(XContentParser parser) throws IOException { - return ExplainLifecycleResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 4fff2804f9350..047f3a418c36b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -43,7 +43,6 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -101,7 +100,7 @@ protected Writeable.Reader instanceReader() { @Override protected Job doParseInstance(XContentParser parser) { - return Job.STRICT_PARSER.apply(parser, null).build(); + return Job.LENIENT_PARSER.apply(parser, null).build(); } public void testToXContentForInternalStorage() throws IOException { @@ -119,10 +118,10 @@ public void testToXContentForInternalStorage() throws IOException { } } - public void testFutureConfigParse() throws IOException { + public void testRestRequestParser_DoesntAllowInternalFields() throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, FUTURE_JOB); - XContentParseException e = expectThrows(XContentParseException.class, () -> Job.STRICT_PARSER.apply(parser, null).build()); - assertEquals("[4:5] [job_details] unknown field [tomorrows_technology_today]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> Job.REST_REQUEST_PARSER.apply(parser, null).build()); + assertEquals("[3:5] [job_details] unknown field [create_time]", e.getMessage()); } public void testFutureMetadataParse() throws IOException { @@ -554,22 +553,6 @@ public void testBuilder_givenTimeFieldInAnalysisConfig() { assertThat(e.getMessage(), equalTo(Messages.getMessage(Messages.JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG))); } - public void testInvalidCreateTimeSettings() { - Job.Builder builder = new Job.Builder("invalid-settings"); - builder.setModelSnapshotId("snapshot-foo"); - assertEquals(Collections.singletonList(Job.MODEL_SNAPSHOT_ID.getPreferredName()), builder.invalidCreateTimeSettings()); - - builder.setCreateTime(new Date()); - builder.setFinishedTime(new Date()); - - Set expected = new HashSet<>(); - expected.add(Job.CREATE_TIME.getPreferredName()); - expected.add(Job.FINISHED_TIME.getPreferredName()); - 
expected.add(Job.MODEL_SNAPSHOT_ID.getPreferredName()); - - assertEquals(expected, new HashSet<>(builder.invalidCreateTimeSettings())); - } - public void testEmptyGroup() { Job.Builder builder = buildJobBuilder("foo"); builder.setGroups(Arrays.asList("foo-group", "")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java new file mode 100644 index 0000000000000..d62cbb7dbab6b --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.apache.lucene.util.automaton.Automata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class AutomatonFieldPredicateTests extends ESTestCase { + public void testMatching() { + String str = randomAlphaOfLength(10); + Automaton a = Automata.makeString(str); + AutomatonFieldPredicate pred = new AutomatonFieldPredicate(a, new CharacterRunAutomaton(a)); + assertTrue(pred.test(str)); + assertFalse(pred.test(str + randomAlphaOfLength(1))); + } + + public void testHash() { + Automaton a = Automata.makeString("a"); + AutomatonFieldPredicate predA = new AutomatonFieldPredicate(a, new CharacterRunAutomaton(a)); + + Automaton b = Automata.makeString("b"); + AutomatonFieldPredicate predB = new AutomatonFieldPredicate(b, new CharacterRunAutomaton(b)); + + assertThat(predA.modifyHash("a"), not(equalTo(predB.modifyHash("a")))); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 5e190f72c596c..4ff250c3a68b3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -266,6 +266,8 @@ public void testIsReserved() { assertThat(ReservedRolesStore.isReserved("transport_client"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("inference_admin"), is(true)); + assertThat(ReservedRolesStore.isReserved("inference_user"), is(true)); assertThat(ReservedRolesStore.isReserved("ingest_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("monitoring_user"), is(true)); assertThat(ReservedRolesStore.isReserved("reporting_user"), is(true)); @@ -3877,6 +3879,46 @@ public void testEnrichUserRole() { assertOnlyReadAllowed(role, ".enrich-foo"); } + public void testInferenceAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + final Authentication authentication = 
AuthenticationTestHelper.builder().build(); + + RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("inference_admin"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/stop", request, authentication)); + assertFalse(role.runAs().check(randomAlphaOfLengthBetween(1, 30))); + assertNoAccessAllowed(role, ".inference"); + } + + public void testInferenceUserRole() { + final TransportRequest request = mock(TransportRequest.class); + final Authentication authentication = AuthenticationTestHelper.builder().build(); + + RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("inference_user"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/stop", request, authentication)); + assertFalse(role.runAs().check(randomAlphaOfLengthBetween(1, 30))); + assertNoAccessAllowed(role, ".inference"); + } + private IndexAbstraction mockIndexAbstraction(String name) { IndexAbstraction mock = mock(IndexAbstraction.class); when(mock.getName()).thenReturn(name); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java index a31c44a165cdf..1804de134c8fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java @@ -8,19 +8,48 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.termsenum.action.TermsEnumResponse; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class TermsEnumResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "term_enum_results", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new TermsEnumResponse( + (List) arg[1], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()), + (Boolean) arg[2] + ); + } + ); + + static { + AbstractBroadcastResponseTestCase.declareBroadcastFields(PARSER); + PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TermsEnumResponse.TERMS_FIELD)); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField(TermsEnumResponse.COMPLETE_FIELD)); + } + protected static List getRandomTerms() { int termCount = randomIntBetween(0, 100); Set uniqueTerms = Sets.newHashSetWithExpectedSize(termCount); @@ -48,7 +77,7 @@ private static TermsEnumResponse createRandomTermEnumResponse() { @Override protected TermsEnumResponse doParseInstance(XContentParser parser) throws IOException { - return TermsEnumResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java index 9613fc83efd50..9a573818fb111 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java @@ -9,8 +9,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response; +import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettings; import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettingsTests; import java.io.IOException; @@ -18,8 +20,31 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class PreviewTransformsActionResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "data_frame_transform_preview", + true, + args -> { + @SuppressWarnings("unchecked") + List> docs = (List>) args[0]; + TransformDestIndexSettings generatedDestIndex = (TransformDestIndexSettings) args[1]; + + return new Response(docs, generatedDestIndex); + } + ); + + static { + 
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PreviewTransformAction.Response.PREVIEW); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> TransformDestIndexSettings.fromXContent(p), + PreviewTransformAction.Response.GENERATED_DEST_INDEX_SETTINGS + ); + } + public static Response randomPreviewResponse() { int size = randomIntBetween(0, 10); List> data = new ArrayList<>(size); @@ -32,7 +57,7 @@ public static Response randomPreviewResponse() { @Override protected Response doParseInstance(XContentParser parser) throws IOException { - return Response.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index ba2cd0ba04312..f1c2de11496bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -123,6 +122,20 @@ public static TransformConfig randomTransformConfig(String id, TransformConfigVe return randomTransformConfig(id, version, pivotConfig, latestConfig); } + public static TransformConfig randomTransformConfig(String id, TimeValue frequency, TransformConfigVersion version) { + PivotConfig pivotConfig; + LatestConfig latestConfig; + if (randomBoolean()) { + pivotConfig = PivotConfigTests.randomPivotConfig(); + latestConfig = null; + } else { + pivotConfig = null; + latestConfig = LatestConfigTests.randomLatestConfig(); + } + + return randomTransformConfig(id, frequency, version, pivotConfig, latestConfig); + } + public static TransformConfig randomTransformConfigWithSettings(SettingsConfig settingsConfig) { PivotConfig pivotConfig; LatestConfig latestConfig; @@ -157,12 +170,28 @@ public static TransformConfig randomTransformConfig( TransformConfigVersion version, PivotConfig pivotConfig, LatestConfig latestConfig + ) { + return randomTransformConfig( + id, + randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), + version, + pivotConfig, + latestConfig + ); + } + + public static TransformConfig randomTransformConfig( + String id, + TimeValue frequency, + TransformConfigVersion version, + PivotConfig pivotConfig, + LatestConfig latestConfig ) { return new TransformConfig( id, randomSourceConfig(), randomDestConfig(), - randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), + frequency, randomBoolean() ? 
null : randomSyncConfig(), randomHeaders(), pivotConfig, @@ -281,10 +310,7 @@ protected ToXContent.Params getToXContentParams() { } private static Map randomHeaders() { - Map headers = Maps.newMapWithExpectedSize(1); - headers.put("key", "value"); - - return headers; + return Map.of("key", "value"); } public void testDefaultMatchAll() throws IOException { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index f452682c620c4..353411ed80b2e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -29,13 +29,33 @@ "ecs.version": { "type": "keyword" }, - "host.id": { - "type": "keyword" - }, "@timestamp": { "type": "date", "format": "epoch_second" }, + "host": { + "properties": { + "arch": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "type": { + "type": "keyword" + } + } + }, + "cloud": { + "properties": { + "provider": { + "type": "keyword" + }, + "region": { + "type": "keyword" + } + } + }, "profiling": { "properties": { "project.id": { diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index 65a4d84e921a2..afa2e95e1284c 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -31,9 +31,7 @@ import java.util.Collection; import java.util.List; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; import static org.elasticsearch.xpack.downsample.DataStreamLifecycleDriver.getBackingIndices; import static org.elasticsearch.xpack.downsample.DataStreamLifecycleDriver.putTSDBIndexTemplate; @@ -57,11 +55,10 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105577") @TestLogging(value = "org.elasticsearch.datastreams.lifecycle:TRACE", reason = "debugging") public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { final InternalTestCluster cluster = internalCluster(); - final List masterNodes = cluster.startMasterOnlyNodes(1); + cluster.startMasterOnlyNodes(1); cluster.startDataOnlyNodes(3); ensureStableCluster(cluster.size()); ensureGreen(); @@ -99,36 +96,12 @@ public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { long sleepTime = randomLongBetween(3000, 4500); logger.info("-> giving data stream lifecycle [{}] millis to make some progress before starting the disruption", sleepTime); Thread.sleep(sleepTime); - final CountDownLatch disruptionStart = new CountDownLatch(1); - final CountDownLatch disruptionEnd = new CountDownLatch(1); List backingIndices = getBackingIndices(client(), dataStreamName); // first generation index String sourceIndex = backingIndices.get(0); - new Thread(new Disruptor(cluster, sourceIndex, new 
DisruptionListener() { - @Override - public void disruptionStart() { - disruptionStart.countDown(); - } - @Override - public void disruptionEnd() { - disruptionEnd.countDown(); - } - }, masterNodes.get(0), (ignored) -> { - try { - cluster.rollingRestart(new InternalTestCluster.RestartCallback() { - @Override - public boolean validateClusterForming() { - return true; - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - })).start(); - - waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty(), 60, TimeUnit.SECONDS); - ensureStableCluster(cluster.numDataAndMasterNodes()); + internalCluster().rollingRestart(new InternalTestCluster.RestartCallback() { + }); // if the source index has already been downsampled and moved into the data stream just use its name directly final String targetIndex = sourceIndex.startsWith("downsample-5m-") ? sourceIndex : "downsample-5m-" + sourceIndex; @@ -147,55 +120,6 @@ public boolean validateClusterForming() { throw new AssertionError(e); } }, 60, TimeUnit.SECONDS); - } - - interface DisruptionListener { - void disruptionStart(); - - void disruptionEnd(); - } - - private class Disruptor implements Runnable { - final InternalTestCluster cluster; - private final String sourceIndex; - private final DisruptionListener listener; - private final String clientNode; - private final Consumer disruption; - - private Disruptor( - final InternalTestCluster cluster, - final String sourceIndex, - final DisruptionListener listener, - final String clientNode, - final Consumer disruption - ) { - this.cluster = cluster; - this.sourceIndex = sourceIndex; - this.listener = listener; - this.clientNode = clientNode; - this.disruption = disruption; - } - - @Override - public void run() { - listener.disruptionStart(); - try { - final String candidateNode = cluster.client(clientNode) - .admin() - .cluster() - .prepareSearchShards(sourceIndex) - .get() - .getNodes()[0].getName(); - logger.info("Candidate node [" + candidateNode + "]"); - disruption.accept(candidateNode); - ensureGreen(sourceIndex); - ensureStableCluster(cluster.numDataAndMasterNodes(), clientNode); - - } catch (Exception e) { - logger.error("Ignoring Error while injecting disruption [" + e.getMessage() + "]"); - } finally { - listener.disruptionEnd(); - } - } + ensureGreen(targetIndex); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java index 0d3a784e00e53..a6ba4346b1a25 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; +import java.util.Collection; import java.util.List; import java.util.function.Predicate; import java.util.function.Supplier; @@ -133,4 +134,9 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(PersistentTaskParams.class, DownsampleShardTaskParams.NAME, DownsampleShardTaskParams::new) ); } + + @Override + public Collection createComponents(PluginServices services) { + return List.of(new DownsampleMetrics(services.telemetryProvider().getMeterRegistry())); + } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java new file mode 100644 index 0000000000000..797b89ecf11a0 --- /dev/null +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.io.IOException; +import java.util.Map; + +/** + * Contains metrics related to downsampling actions. + * It gets initialized as a component by the {@link Downsample} plugin and can be injected into its actions. + * + * In tests, use TestTelemetryPlugin to inject a MeterRegistry for testing purposes + * and check that metrics get recorded as expected. + * + * To add a new metric, you need to: + * - Add a constant for its name, following the naming conventions for metrics. + * - Register it in method {@link #doStart}. + * - Add a function for recording its value. + * - If needed, inject {@link DownsampleMetrics} into the action containing the logic + * that records the metric value. For reference, see {@link TransportDownsampleIndexerAction}. + */ +public class DownsampleMetrics extends AbstractLifecycleComponent { + + public static final String LATENCY_SHARD = "es.tsdb.downsample.latency.shard.histogram"; + public static final String LATENCY_TOTAL = "es.tsdb.downsample.latency.total.histogram"; + + private final MeterRegistry meterRegistry; + + public DownsampleMetrics(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + + @Override + protected void doStart() { + // Register all metrics to track. 
+ meterRegistry.registerLongHistogram(LATENCY_SHARD, "Downsampling action latency per shard", "ms"); + meterRegistry.registerLongHistogram(LATENCY_TOTAL, "Downsampling latency end-to-end", "ms"); + } + + @Override + protected void doStop() {} + + @Override + protected void doClose() throws IOException {} + + enum ActionStatus { + + SUCCESS("success"), + MISSING_DOCS("missing_docs"), + FAILED("failed"); + + static final String NAME = "status"; + + private final String message; + + ActionStatus(String message) { + this.message = message; + } + + String getMessage() { + return message; + } + } + + void recordLatencyShard(long durationInMilliSeconds, ActionStatus status) { + meterRegistry.getLongHistogram(LATENCY_SHARD).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); + } + + void recordLatencyTotal(long durationInMilliSeconds, ActionStatus status) { + meterRegistry.getLongHistogram(LATENCY_TOTAL).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); + } +} diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 844c644ee9ea6..59c1c9c38efae 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -85,6 +85,7 @@ class DownsampleShardIndexer { public static final ByteSizeValue DOWNSAMPLE_MAX_BYTES_IN_FLIGHT = new ByteSizeValue(50, ByteSizeUnit.MB); private final IndexShard indexShard; private final Client client; + private final DownsampleMetrics downsampleMetrics; private final String downsampleIndex; private final Engine.Searcher searcher; private final SearchExecutionContext searchExecutionContext; @@ -103,6 +104,7 @@ class DownsampleShardIndexer { final DownsampleShardTask task, final Client client, final IndexService indexService, + final DownsampleMetrics downsampleMetrics, final ShardId shardId, final String downsampleIndex, final DownsampleConfig config, @@ -113,6 +115,7 @@ class DownsampleShardIndexer { ) { this.task = task; this.client = client; + this.downsampleMetrics = downsampleMetrics; this.indexShard = indexService.getShard(shardId.id()); this.downsampleIndex = downsampleIndex; this.searcher = indexShard.acquireSearcher("downsampling"); @@ -164,6 +167,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept timeSeriesSearcher.search(initialStateQuery, bucketCollector); } + TimeValue duration = TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime); logger.info( "Shard [{}] successfully sent [{}], received source doc [{}], indexed downsampled doc [{}], failed [{}], took [{}]", indexShard.shardId(), @@ -171,7 +175,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept task.getNumSent(), task.getNumIndexed(), task.getNumFailed(), - TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime) + duration ); if (task.getNumIndexed() != task.getNumSent()) { @@ -187,6 +191,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumSent() + "]"; logger.info(error); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.MISSING_DOCS); throw new DownsampleShardIndexerException(error, false); } @@ -199,6 +204,7 @@ public 
DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumFailed() + "]"; logger.info(error); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.FAILED); throw new DownsampleShardIndexerException(error, false); } @@ -208,6 +214,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept ActionListener.noop() ); logger.info("Downsampling task [" + task.getPersistentTaskId() + " on shard " + indexShard.shardId() + " completed"); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.SUCCESS); return new DownsampleIndexerAction.ShardDownsampleResponse(indexShard.shardId(), task.getNumIndexed()); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index b4116d42d25ca..5e6f8b6b5b18e 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -188,6 +188,7 @@ private static IndexShardRoutingTable findShardRoutingTable(ShardId shardId, Clu static void realNodeOperation( Client client, IndicesService indicesService, + DownsampleMetrics downsampleMetrics, DownsampleShardTask task, DownsampleShardTaskParams params, BytesRef lastDownsampledTsid @@ -209,6 +210,7 @@ protected void doRun() throws Exception { task, client, indicesService.indexServiceSafe(params.shardId().getIndex()), + downsampleMetrics, params.shardId(), params.downsampleIndex(), params.downsampleConfig(), @@ -303,17 +305,25 @@ public static class TA extends TransportAction { private final Client client; private final IndicesService indicesService; + private final DownsampleMetrics downsampleMetrics; @Inject - public TA(TransportService transportService, ActionFilters actionFilters, Client client, IndicesService indicesService) { + public TA( + TransportService transportService, + ActionFilters actionFilters, + Client client, + IndicesService indicesService, + DownsampleMetrics downsampleMetrics + ) { super(NAME, actionFilters, transportService.getTaskManager()); this.client = client; this.indicesService = indicesService; + this.downsampleMetrics = downsampleMetrics; } @Override protected void doExecute(Task t, Request request, ActionListener listener) { - realNodeOperation(client, indicesService, request.task, request.params, request.lastDownsampleTsid); + realNodeOperation(client, indicesService, downsampleMetrics, request.task, request.params, request.lastDownsampleTsid); listener.onResponse(ActionResponse.Empty.INSTANCE); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 5debe5d2edfc9..c526561999497 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -91,6 +91,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.function.Predicate; @@ -115,6 +116,7 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc private final IndexScopedSettings indexScopedSettings; private final ThreadContext threadContext; private final PersistentTasksService persistentTasksService; + private final DownsampleMetrics downsampleMetrics; private static final Set FORBIDDEN_SETTINGS = Set.of( IndexSettings.DEFAULT_PIPELINE.getKey(), @@ -153,7 +155,8 @@ public TransportDownsampleAction( ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings, - PersistentTasksService persistentTasksService + PersistentTasksService persistentTasksService, + DownsampleMetrics downsampleMetrics ) { super( DownsampleAction.NAME, @@ -173,6 +176,21 @@ public TransportDownsampleAction( this.threadContext = threadPool.getThreadContext(); this.taskQueue = clusterService.createTaskQueue("downsample", Priority.URGENT, STATE_UPDATE_TASK_EXECUTOR); this.persistentTasksService = persistentTasksService; + this.downsampleMetrics = downsampleMetrics; + } + + private void recordLatencyOnSuccess(long startTime) { + downsampleMetrics.recordLatencyTotal( + TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), + DownsampleMetrics.ActionStatus.SUCCESS + ); + } + + private void recordLatencyOnFailure(long startTime) { + downsampleMetrics.recordLatencyTotal( + TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), + DownsampleMetrics.ActionStatus.FAILED + ); } @Override @@ -182,6 +200,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { + long startTime = client.threadPool().relativeTimeInMillis(); String sourceIndexName = request.getSourceIndex(); final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); @@ -234,33 +253,11 @@ protected void masterOperation( } final TaskId parentTask = new TaskId(clusterService.localNode().getId(), task.getId()); - // Shortcircuit if target index has been downsampled: + // Short circuit if target index has been downsampled: final String downsampleIndexName = request.getTargetIndex(); - IndexMetadata downsampleIndex = state.getMetadata().index(downsampleIndexName); - if (downsampleIndex != null) { - var downsampleStatus = IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(downsampleIndex.getSettings()); - if (downsampleStatus == DownsampleTaskStatus.UNKNOWN) { - // This isn't a downsample index, so fail: - listener.onFailure(new ResourceAlreadyExistsException(downsampleIndex.getIndex())); - return; - } else if (downsampleStatus == DownsampleTaskStatus.SUCCESS) { - listener.onResponse(AcknowledgedResponse.TRUE); - return; - } - // In case the write block has been set on the target index means that the shard level downsampling itself was successful, - // but the previous invocation failed later performing settings update, refresh or force merge. - // The write block is used a signal to resume from the refresh part of the downsample api invocation. 
- if (downsampleIndex.getSettings().get(IndexMetadata.SETTING_BLOCKS_WRITE) != null) { - var refreshRequest = new RefreshRequest(downsampleIndexName); - refreshRequest.setParentTask(parentTask); - client.admin() - .indices() - .refresh( - refreshRequest, - new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, request.getWaitTimeout()) - ); - return; - } + if (canShortCircuit(downsampleIndexName, parentTask, request.getWaitTimeout(), startTime, state.metadata(), listener)) { + logger.info("Skipping downsampling, because a previous execution already completed downsampling"); + return; } try { MetadataCreateIndexService.validateIndexName(downsampleIndexName, state); @@ -347,6 +344,7 @@ protected void masterOperation( sourceIndexMetadata, downsampleIndexName, parentTask, + startTime, metricFields, labelFields, dimensionFields @@ -356,12 +354,25 @@ protected void masterOperation( } }, e -> { if (e instanceof ResourceAlreadyExistsException) { + var metadata = clusterService.state().metadata(); + if (canShortCircuit( + request.getTargetIndex(), + parentTask, + request.getWaitTimeout(), + startTime, + metadata, + listener + )) { + logger.info("Downsample tasks are not created, because a previous execution already completed downsampling"); + return; + } performShardDownsampling( request, delegate, sourceIndexMetadata, downsampleIndexName, parentTask, + startTime, metricFields, labelFields, dimensionFields @@ -374,6 +385,54 @@ protected void masterOperation( })); } + /** + * Shortcircuit when another downsample api invocation already completed successfully. + */ + private boolean canShortCircuit( + String targetIndexName, + TaskId parentTask, + TimeValue waitTimeout, + long startTime, + Metadata metadata, + ActionListener listener + ) { + IndexMetadata targetIndexMetadata = metadata.index(targetIndexName); + if (targetIndexMetadata == null) { + return false; + } + + var downsampleStatus = IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(targetIndexMetadata.getSettings()); + if (downsampleStatus == DownsampleTaskStatus.UNKNOWN) { + // This isn't a downsample index, so fail: + listener.onFailure(new ResourceAlreadyExistsException(targetIndexMetadata.getIndex())); + return true; + } else if (downsampleStatus == DownsampleTaskStatus.SUCCESS) { + listener.onResponse(AcknowledgedResponse.TRUE); + return true; + } + // In case the write block has been set on the target index means that the shard level downsampling itself was successful, + // but the previous invocation failed later performing settings update, refresh or force merge. + // The write block is used a signal to resume from the refresh part of the downsample api invocation. + if (targetIndexMetadata.getSettings().get(IndexMetadata.SETTING_BLOCKS_WRITE) != null) { + var refreshRequest = new RefreshRequest(targetIndexMetadata.getIndex().getName()); + refreshRequest.setParentTask(parentTask); + client.admin() + .indices() + .refresh( + refreshRequest, + new RefreshDownsampleIndexActionListener( + listener, + parentTask, + targetIndexMetadata.getIndex().getName(), + waitTimeout, + startTime + ) + ); + return true; + } + return false; + } + // 3. downsample index created or already exist (in case of retry). Run downsample indexer persistent task on each shard. 
private void performShardDownsampling( DownsampleAction.Request request, @@ -381,6 +440,7 @@ private void performShardDownsampling( IndexMetadata sourceIndexMetadata, String downsampleIndexName, TaskId parentTask, + long startTime, List metricFields, List labelFields, List dimensionFields @@ -390,6 +450,7 @@ private void performShardDownsampling( // NOTE: before we set the number of replicas to 0, as a result here we are // only dealing with primary shards. final AtomicInteger countDown = new AtomicInteger(numberOfShards); + final AtomicBoolean errorReported = new AtomicBoolean(false); for (int shardNum = 0; shardNum < numberOfShards; shardNum++) { final ShardId shardId = new ShardId(sourceIndex, shardNum); final String persistentTaskId = createPersistentTaskId( @@ -434,13 +495,16 @@ public void onResponse(PersistentTasksCustomMetadata.PersistentTask listener, final IndexMetadata sourceIndexMetadata, final String downsampleIndexName, - final TaskId parentTask + final TaskId parentTask, + final long startTime ) { // 4. Make downsample index read-only and set the correct number of replicas final Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true); @@ -503,7 +568,13 @@ private void updateTargetIndexSettingStep( .indices() .updateSettings( updateSettingsReq, - new UpdateDownsampleIndexSettingsActionListener(listener, parentTask, downsampleIndexName, request.getWaitTimeout()) + new UpdateDownsampleIndexSettingsActionListener( + listener, + parentTask, + downsampleIndexName, + request.getWaitTimeout(), + startTime + ) ); } @@ -781,7 +852,17 @@ private void createDownsampleIndex( .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, String.valueOf(numberOfReplicas)) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1") .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED) - .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval); + .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval) + .put(IndexSettings.MODE.getKey(), sourceIndexMetadata.getIndexMode()) + .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), sourceIndexMetadata.getRoutingPaths()) + .put( + IndexSettings.TIME_SERIES_START_TIME.getKey(), + sourceIndexMetadata.getSettings().get(IndexSettings.TIME_SERIES_START_TIME.getKey()) + ) + .put( + IndexSettings.TIME_SERIES_END_TIME.getKey(), + sourceIndexMetadata.getSettings().get(IndexSettings.TIME_SERIES_END_TIME.getKey()) + ); if (sourceIndexMetadata.getSettings().hasValue(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey())) { builder.put( MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), @@ -837,17 +918,20 @@ class UpdateDownsampleIndexSettingsActionListener implements ActionListener listener, final TaskId parentTask, final String downsampleIndexName, - final TimeValue timeout + final TimeValue timeout, + final long startTime ) { this.listener = listener; this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; this.timeout = timeout; + this.startTime = startTime; } @Override @@ -856,11 +940,12 @@ public void onResponse(final AcknowledgedResponse response) { request.setParentTask(parentTask); client.admin() .indices() - .refresh(request, new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, timeout)); + .refresh(request, new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, timeout, startTime)); } @Override public void onFailure(Exception e) { + 
recordLatencyOnSuccess(startTime); // Downsampling has already completed in all shards. listener.onFailure(e); } @@ -875,17 +960,20 @@ class RefreshDownsampleIndexActionListener implements ActionListener actionListener, TaskId parentTask, final String downsampleIndexName, - final TimeValue timeout + final TimeValue timeout, + final long startTime ) { this.actionListener = actionListener; this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; this.timeout = timeout; + this.startTime = startTime; } @Override @@ -896,7 +984,9 @@ public void onResponse(final BroadcastResponse response) { // Mark downsample index as "completed successfully" ("index.downsample.status": "success") taskQueue.submitTask( "update-downsample-metadata [" + downsampleIndexName + "]", - new DownsampleClusterStateUpdateTask(new ForceMergeActionListener(parentTask, downsampleIndexName, actionListener)) { + new DownsampleClusterStateUpdateTask( + new ForceMergeActionListener(parentTask, downsampleIndexName, startTime, actionListener) + ) { @Override public ClusterState execute(ClusterState currentState) { @@ -923,6 +1013,7 @@ public ClusterState execute(ClusterState currentState) { @Override public void onFailure(Exception e) { + recordLatencyOnSuccess(startTime); // Downsampling has already completed in all shards. actionListener.onFailure(e); } @@ -936,42 +1027,43 @@ class ForceMergeActionListener implements ActionListener { final ActionListener actionListener; private final TaskId parentTask; private final String downsampleIndexName; + private final long startTime; ForceMergeActionListener( final TaskId parentTask, final String downsampleIndexName, + final long startTime, final ActionListener onFailure ) { this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; + this.startTime = startTime; this.actionListener = onFailure; } @Override public void onResponse(final AcknowledgedResponse response) { - /* - * At this point downsample index has been created - * successfully even force merge fails. - * So, we should not fail the downsmaple operation - */ ForceMergeRequest request = new ForceMergeRequest(downsampleIndexName); request.maxNumSegments(1); request.setParentTask(parentTask); - client.admin() - .indices() - .forceMerge(request, ActionListener.wrap(mergeIndexResp -> actionListener.onResponse(AcknowledgedResponse.TRUE), t -> { - /* - * At this point downsample index has been created - * successfully even if force merge failed. - * So, we should not fail the downsample operation. - */ - logger.error("Failed to force-merge downsample index [" + downsampleIndexName + "]", t); - actionListener.onResponse(AcknowledgedResponse.TRUE); - })); + client.admin().indices().forceMerge(request, ActionListener.wrap(mergeIndexResp -> { + actionListener.onResponse(AcknowledgedResponse.TRUE); + recordLatencyOnSuccess(startTime); + }, t -> { + /* + * At this point downsample index has been created + * successfully even if force merge failed. + * So, we should not fail the downsample operation. 
+ */ + logger.error("Failed to force-merge downsample index [" + downsampleIndexName + "]", t); + actionListener.onResponse(AcknowledgedResponse.TRUE); + recordLatencyOnSuccess(startTime); + })); } @Override public void onFailure(Exception e) { + recordLatencyOnSuccess(startTime); this.actionListener.onFailure(e); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java index 24d1df638f80b..f7cfe2d859583 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java @@ -52,6 +52,8 @@ public class TransportDownsampleIndexerAction extends TransportBroadcastAction< private final ClusterService clusterService; private final IndicesService indicesService; + private final DownsampleMetrics downsampleMetrics; + @Inject public TransportDownsampleIndexerAction( Client client, @@ -59,7 +61,8 @@ public TransportDownsampleIndexerAction( TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver + IndexNameExpressionResolver indexNameExpressionResolver, + DownsampleMetrics downsampleMetrics ) { super( DownsampleIndexerAction.NAME, @@ -74,6 +77,7 @@ public TransportDownsampleIndexerAction( this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN); this.clusterService = clusterService; this.indicesService = indicesService; + this.downsampleMetrics = downsampleMetrics; } @Override @@ -139,6 +143,7 @@ protected DownsampleIndexerAction.ShardDownsampleResponse shardOperation( (DownsampleShardTask) task, client, indexService, + downsampleMetrics, request.shardId(), request.getDownsampleIndex(), request.getRollupConfig(), diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index a7b36bbd7dc9b..5e0e6be61b9fc 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; @@ -80,6 +81,8 @@ import org.elasticsearch.tasks.TaskCancelHelper; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -93,6 +96,7 @@ import org.elasticsearch.xpack.core.ilm.RolloverAction; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; @@ 
-162,7 +166,8 @@ protected Collection> getPlugins() { Downsample.class, AggregateMetricMapperPlugin.class, DataStreamsPlugin.class, - IndexLifecycle.class + IndexLifecycle.class, + TestTelemetryPlugin.class ); } @@ -623,6 +628,7 @@ public void testCancelDownsampleIndexer() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -672,6 +678,7 @@ public void testDownsampleBulkFailed() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -739,6 +746,7 @@ public void testTooManyBytesInFlight() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -791,6 +799,7 @@ public void testDownsampleStats() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -810,6 +819,27 @@ public void testDownsampleStats() throws IOException { assertDownsampleIndexer(indexService, shardNum, task, executeResponse, task.getTotalShardDocCount()); } + + // Check that metrics get collected as expected. + final TestTelemetryPlugin plugin = getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + + List measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_SHARD); + assertFalse(measurements.isEmpty()); + for (Measurement measurement : measurements) { + assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); + } + + measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_TOTAL); + assertFalse(measurements.isEmpty()); + for (Measurement measurement : measurements) { + assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed"))); + } } public void testResumeDownsample() throws IOException { @@ -848,6 +878,7 @@ public void testResumeDownsample() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -923,6 +954,7 @@ public void testResumeDownsamplePartial() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, diff --git a/x-pack/plugin/ent-search/build.gradle b/x-pack/plugin/ent-search/build.gradle index 4551011b03ca1..efd33cd163fdc 100644 --- a/x-pack/plugin/ent-search/build.gradle +++ b/x-pack/plugin/ent-search/build.gradle @@ -34,8 +34,6 @@ dependencies { javaRestTestImplementation(project(path: xpackModule('core'))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation(project(':modules:lang-mustache')) - - module ':modules:search-business-rules' } testClusters.configureEach { diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java 
b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index 95b3b576eb46c..e01b8de941851 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -66,10 +66,10 @@ public void testBehavioralAnalyticsDataRetention() throws Exception { String legacyAnalyticsCollectionName = "oldstuff"; String newAnalyticsCollectionName = "newstuff"; - if (isRunningAgainstOldCluster()) { - // Ensure index template is installed before executing the tests. - assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME)); + // Wait for the cluster to finish initialization + waitForClusterReady(); + if (isRunningAgainstOldCluster()) { // Create an analytics collection Request legacyPutRequest = new Request("PUT", "_application/analytics/" + legacyAnalyticsCollectionName); assertOK(client().performRequest(legacyPutRequest)); @@ -77,9 +77,6 @@ public void testBehavioralAnalyticsDataRetention() throws Exception { // Validate that ILM lifecycle is in place assertBusy(() -> assertUsingLegacyDataRetentionPolicy(legacyAnalyticsCollectionName)); } else { - // Ensure index template is updated to version 3 before executing the tests. - assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME, DSL_REGISTRY_VERSION)); - // Create a new analytics collection Request putRequest = new Request("PUT", "_application/analytics/" + newAnalyticsCollectionName); assertOK(client().performRequest(putRequest)); @@ -129,6 +126,21 @@ private void assertUsingDslDataRetention(String analyticsCollectionName) throws assertTrue(evaluatedNewDataStream); } + private void waitForClusterReady() throws Exception { + // Ensure index template is installed with the right version before executing the tests. + if (isRunningAgainstOldCluster()) { + // No minimum version of the registry required when running on old clusters. + assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME)); + + // When running on old cluster, wait for the ILM policy to be installed. + assertBusy(() -> assertILMPolicyExists(EVENT_DATA_STREAM_LEGACY_ILM_POLICY_NAME)); + } else { + // DSL has been introduced with the version 3 of the registry. + // Wait for this version to be deployed. 
+ assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME, DSL_REGISTRY_VERSION)); + } + } + private void assertDataStreamTemplateExists(String templateName) throws IOException { assertDataStreamTemplateExists(templateName, null); } @@ -138,6 +150,7 @@ private void assertDataStreamTemplateExists(String templateName, Integer minVers Request getIndexTemplateRequest = new Request("GET", "_index_template/" + templateName); Response response = client().performRequest(getIndexTemplateRequest); assertOK(response); + if (minVersion != null) { String pathToVersion = "index_templates.0.index_template.version"; ObjectPath indexTemplatesResponse = ObjectPath.createFromResponse(response); @@ -151,4 +164,20 @@ private void assertDataStreamTemplateExists(String templateName, Integer minVers throw e; } } + + private void assertILMPolicyExists(String policyName) throws IOException { + try { + Request getILMPolicyRequest = new Request("GET", "_ilm/policy/" + policyName); + Response response = client().performRequest(getILMPolicyRequest); + assertOK(response); + + assertNotNull(ObjectPath.createFromResponse(response).evaluate(policyName)); + } catch (ResponseException e) { + int status = e.getResponse().getStatusLine().getStatusCode(); + if (status == 404) { + throw new AssertionError("Waiting for the policy to be created"); + } + throw e; + } + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 8a544f735b570..659c58d2bd1b8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -146,20 +146,5 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IO builder.field(COLLECTION_NAME_FIELD.getPreferredName(), name); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "put_analytics_collection_response", - false, - (p) -> { - return new Response((boolean) p[0], (String) p[1]); - } - ); - static { - PARSER.declareString(constructorArg(), COLLECTION_NAME_FIELD); - } - - public static Response fromXContent(String resourceName, XContentParser parser) throws IOException { - return new Response(AcknowledgedResponse.fromXContent(parser).isAcknowledged(), resourceName); - } - } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java index ce6259b40765e..f6a13477acae7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -40,7 +41,7 @@ protected PutAnalyticsCollectionAction.Response mutateInstance(PutAnalyticsColle @Override protected PutAnalyticsCollectionAction.Response doParseInstance(XContentParser parser) throws IOException { - return PutAnalyticsCollectionAction.Response.fromXContent(this.name, parser); + return new PutAnalyticsCollectionAction.Response(AcknowledgedResponse.fromXContent(parser).isAcknowledged(), this.name); } @Override diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 8f8d2774a5020..3fdfa7835b036 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -88,7 +88,7 @@ tasks.named("test").configure { into "${rootDir}/docs/reference/esql/functions" include '**/*.asciidoc', '**/*.svg' preserve { - include '/*.asciidoc' + include '/*.asciidoc', 'README.md' } } } diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java index bb7b2cc888c2c..7e92fc5c2734e 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java @@ -17,4 +17,9 @@ public @interface GroupingAggregator { IntermediateState[] value() default {}; + + /** + * If {@code true} then the @timestamp LongVector will be appended to the input blocks of the aggregation function. + */ + boolean includeTimestamps() default false; } diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java index 54a5caa05d149..3781fc6290365 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java @@ -7,9 +7,22 @@ package org.elasticsearch.compute.ann; +/** + * Description of a column of data that makes up the intermediate state of + * an aggregation. + */ public @interface IntermediateState { - + /** + * Name of the column. + */ String name(); + /** + * Type of the column. This should be the name of an element type or + * an element type followed by {@code _BLOCK}. If this ends in {@code _BLOCK} + * then the aggregation will receive the {@code Block} as an argument to + * {@code combineIntermediate} and a position. It's the aggregation's + * responsibility to iterate values from the block as needed. 
+ */ String type(); } diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 532fd51a42437..d04daf6631447 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -365,7 +365,46 @@ tasks.named('stringTemplates').configure { it.inputFile = arrayStateInputFile it.outputFile = "org/elasticsearch/compute/aggregation/DoubleArrayState.java" } - File multivalueDedupeInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st") + File valuesAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesLongAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java" + } + + File rateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateLongAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateDoubleAggregator.java" + } + + File multivalueDedupeInputFile = file("src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st") template { it.properties = intProperties it.inputFile = multivalueDedupeInputFile diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java index a9bea3105ee10..3f031db2978f9 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java @@ -54,8 +54,12 @@ public AggregatorFunctionSupplierImplementer( this.groupingAggregatorImplementer = groupingAggregatorImplementer; Set createParameters = new LinkedHashSet<>(); - createParameters.addAll(aggregatorImplementer.createParameters()); - createParameters.addAll(groupingAggregatorImplementer.createParameters()); + if (aggregatorImplementer != null) { + createParameters.addAll(aggregatorImplementer.createParameters()); + } + if (groupingAggregatorImplementer != null) { + createParameters.addAll(groupingAggregatorImplementer.createParameters()); + } this.createParameters = new ArrayList<>(createParameters); this.createParameters.add(0, new Parameter(LIST_INTEGER, "channels")); @@ -84,7 +88,11 @@ private TypeSpec type() { 
createParameters.stream().forEach(p -> p.declareField(builder)); builder.addMethod(ctor()); - builder.addMethod(aggregator()); + if (aggregatorImplementer != null) { + builder.addMethod(aggregator()); + } else { + builder.addMethod(unsupportedNonGroupingAggregator()); + } builder.addMethod(groupingAggregator()); builder.addMethod(describe()); return builder.build(); @@ -96,6 +104,15 @@ private MethodSpec ctor() { return builder.build(); } + private MethodSpec unsupportedNonGroupingAggregator() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") + .addParameter(DRIVER_CONTEXT, "driverContext") + .returns(Types.AGGREGATOR_FUNCTION); + builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + private MethodSpec aggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") .addParameter(DRIVER_CONTEXT, "driverContext") diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 1d74416a81894..d3fe51b4cc225 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -108,11 +108,9 @@ public AggregatorImplementer(Elements elements, TypeElement declarationType, Int (declarationType.getSimpleName() + "AggregatorFunction").replace("AggregatorAggregator", "Aggregator") ); this.valuesIsBytesRef = BYTES_REF.equals(TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType())); - intermediateState = Arrays.stream(interStateAnno).map(state -> new IntermediateStateDesc(state.name(), state.type())).toList(); + intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); } - record IntermediateStateDesc(String name, String elementType) {} - ClassName implementation() { return implementation; } @@ -229,7 +227,7 @@ private MethodSpec create() { for (Parameter p : createParameters) { builder.addParameter(p.type(), p.name()); } - if (init.getParameters().isEmpty()) { + if (createParameters.isEmpty()) { builder.addStatement("return new $T(driverContext, channels, $L)", implementation, callInit()); } else { builder.addStatement( @@ -410,20 +408,7 @@ private MethodSpec addIntermediateInput() { builder.addStatement("assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size()"); for (int i = 0; i < intermediateState.size(); i++) { var interState = intermediateState.get(i); - ClassName blockType = blockType(interState.elementType()); - builder.addStatement("Block $L = page.getBlock(channels.get($L))", interState.name + "Uncast", i); - builder.beginControlFlow("if ($L.areAllValuesNull())", interState.name + "Uncast"); - { - builder.addStatement("return"); - builder.endControlFlow(); - } - builder.addStatement( - "$T $L = (($T) $L).asVector()", - vectorType(interState.elementType()), - interState.name(), - blockType, - interState.name() + "Uncast" - ); + interState.assignToVariable(builder, i); builder.addStatement("assert $L.getPositionCount() == 1", interState.name()); } if (combineIntermediate != null) { @@ -449,15 +434,7 @@ private MethodSpec addIntermediateInput() { } String 
intermediateStateRowAccess() { - return intermediateState.stream().map(AggregatorImplementer::vectorAccess).collect(joining(", ")); - } - - static String vectorAccess(IntermediateStateDesc isd) { - String s = isd.name() + "." + vectorAccessorName(isd.elementType()) + "(0"; - if (isd.elementType().equals("BYTES_REF")) { - s += ", scratch"; - } - return s + ")"; + return intermediateState.stream().map(desc -> desc.access("0")).collect(joining(", ")); } private String primitiveStateMethod() { @@ -548,4 +525,42 @@ private boolean hasPrimitiveState() { default -> false; }; } + + record IntermediateStateDesc(String name, String elementType, boolean block) { + static IntermediateStateDesc newIntermediateStateDesc(IntermediateState state) { + String type = state.type(); + boolean block = false; + if (type.toUpperCase(Locale.ROOT).endsWith("_BLOCK")) { + type = type.substring(0, type.length() - "_BLOCK".length()); + block = true; + } + return new IntermediateStateDesc(state.name(), type, block); + } + + public String access(String position) { + if (block) { + return name(); + } + String s = name() + "." + vectorAccessorName(elementType()) + "(" + position; + if (elementType().equals("BYTES_REF")) { + s += ", scratch"; + } + return s + ")"; + } + + public void assignToVariable(MethodSpec.Builder builder, int offset) { + builder.addStatement("Block $L = page.getBlock(channels.get($L))", name + "Uncast", offset); + ClassName blockType = blockType(elementType()); + builder.beginControlFlow("if ($L.areAllValuesNull())", name + "Uncast"); + { + builder.addStatement("return"); + builder.endControlFlow(); + } + if (block) { + builder.addStatement("$T $L = ($T) $L", blockType, name, blockType, name + "Uncast"); + } else { + builder.addStatement("$T $L = (($T) $L).asVector()", vectorType(elementType), name, blockType, name + "Uncast"); + } + } + } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index b724ee9152ca8..d07b24047b7e2 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -86,17 +86,21 @@ public boolean process(Set set, RoundEnvironment roundEnv write(aggClass, "aggregator", implementer.sourceFile(), env); } GroupingAggregatorImplementer groupingAggregatorImplementer = null; - if (aggClass.getAnnotation(Aggregator.class) != null) { - assert aggClass.getAnnotation(GroupingAggregator.class) != null; + if (aggClass.getAnnotation(GroupingAggregator.class) != null) { IntermediateState[] intermediateState = aggClass.getAnnotation(GroupingAggregator.class).value(); - if (intermediateState.length == 0) { + if (intermediateState.length == 0 && aggClass.getAnnotation(Aggregator.class) != null) { intermediateState = aggClass.getAnnotation(Aggregator.class).value(); } - - groupingAggregatorImplementer = new GroupingAggregatorImplementer(env.getElementUtils(), aggClass, intermediateState); + boolean includeTimestamps = aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps(); + groupingAggregatorImplementer = new GroupingAggregatorImplementer( + env.getElementUtils(), + aggClass, + intermediateState, + includeTimestamps + ); write(aggClass, "grouping aggregator", groupingAggregatorImplementer.sourceFile(), env); } - if (implementer != null && 
groupingAggregatorImplementer != null) { + if (implementer != null || groupingAggregatorImplementer != null) { write( aggClass, "aggregator function supplier", diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index 12e5de9fef591..cb65d2337d588 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -47,10 +47,10 @@ import static org.elasticsearch.compute.gen.Types.INT_VECTOR; import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; +import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; +import static org.elasticsearch.compute.gen.Types.LONG_VECTOR; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; -import static org.elasticsearch.compute.gen.Types.blockType; -import static org.elasticsearch.compute.gen.Types.vectorType; /** * Implements "GroupingAggregationFunction" from a class containing static methods @@ -72,9 +72,15 @@ public class GroupingAggregatorImplementer { private final boolean valuesIsBytesRef; private final List createParameters; private final ClassName implementation; - private final List intermediateState; - - public GroupingAggregatorImplementer(Elements elements, TypeElement declarationType, IntermediateState[] interStateAnno) { + private final List intermediateState; + private final boolean includeTimestampVector; + + public GroupingAggregatorImplementer( + Elements elements, + TypeElement declarationType, + IntermediateState[] interStateAnno, + boolean includeTimestampVector + ) { this.declarationType = declarationType; this.init = findRequiredMethod(declarationType, new String[] { "init", "initGrouping" }, e -> true); @@ -94,7 +100,7 @@ public GroupingAggregatorImplementer(Elements elements, TypeElement declarationT this.createParameters = init.getParameters() .stream() .map(Parameter::from) - .filter(f -> false == f.type().equals(BIG_ARRAYS)) + .filter(f -> false == f.type().equals(BIG_ARRAYS) && false == f.type().equals(DRIVER_CONTEXT)) .collect(Collectors.toList()); this.implementation = ClassName.get( @@ -102,11 +108,12 @@ public GroupingAggregatorImplementer(Elements elements, TypeElement declarationT (declarationType.getSimpleName() + "GroupingAggregatorFunction").replace("AggregatorGroupingAggregator", "GroupingAggregator") ); - intermediateState = Arrays.stream(interStateAnno).map(state -> new IntermediateStateDesc(state.name(), state.type())).toList(); + intermediateState = Arrays.stream(interStateAnno) + .map(AggregatorImplementer.IntermediateStateDesc::newIntermediateStateDesc) + .toList(); + this.includeTimestampVector = includeTimestampVector; } - record IntermediateStateDesc(String name, String elementType) {} - public ClassName implementation() { return implementation; } @@ -266,15 +273,24 @@ private MethodSpec prepareProcessPage() { builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", valueBlockType(init, combine)); builder.addStatement("$T valuesVector = valuesBlock.asVector()", valueVectorType(init, combine)); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = 
page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (valuesVector == null)"); + String extra = includeTimestampVector ? ", timestampsVector" : ""; { builder.beginControlFlow("if (valuesBlock.mayHaveNulls())"); builder.addStatement("state.enableGroupIdTracking(seenGroupIds)"); builder.endControlFlow(); - builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesBlock)"))); + builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesBlock$L)", extra))); } builder.endControlFlow(); - builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesVector)"))); + builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesVector$L)", extra))); return builder.build(); } @@ -310,6 +326,9 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { MethodSpec.Builder builder = MethodSpec.methodBuilder(methodName); builder.addModifiers(Modifier.PRIVATE); builder.addParameter(TypeName.INT, "positionOffset").addParameter(groupsType, "groups").addParameter(valuesType, "values"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (valuesIsBytesRef) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); @@ -356,6 +375,10 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable, S combineRawInputForBytesRef(builder, blockVariable, offsetVariable); return; } + if (includeTimestampVector) { + combineRawInputWithTimestamp(builder, offsetVariable); + return; + } TypeName valueType = TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType()); if (valueType.isPrimitive() == false) { throw new IllegalArgumentException("second parameter to combine must be a primitive"); @@ -405,6 +428,22 @@ private void combineRawInputForVoid( ); } + private void combineRawInputWithTimestamp(MethodSpec.Builder builder, String offsetVariable) { + TypeName valueType = TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType()); + String blockType = valueType.toString().substring(0, 1).toUpperCase(Locale.ROOT) + valueType.toString().substring(1); + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + blockType, + offsetVariable + ); + } + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { // scratch is a BytesRef var that must have been defined before the iteration starts builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); @@ -421,11 +460,7 @@ private MethodSpec addIntermediateInput() { builder.addStatement("assert channels.size() == intermediateBlockCount()"); int count = 0; for (var interState : intermediateState) { - builder.addStatement( - 
"$T " + interState.name() + " = page.<$T>getBlock(channels.get(" + count + ")).asVector()", - vectorType(interState.elementType()), - blockType(interState.elementType()) - ); + interState.assignToVariable(builder, count); count++; } final String first = intermediateState.get(0).name(); @@ -433,13 +468,13 @@ private MethodSpec addIntermediateInput() { builder.addStatement( "assert " + intermediateState.stream() - .map(IntermediateStateDesc::name) + .map(AggregatorImplementer.IntermediateStateDesc::name) .skip(1) .map(s -> first + ".getPositionCount() == " + s + ".getPositionCount()") .collect(joining(" && ")) ); } - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + if (intermediateState.stream().map(AggregatorImplementer.IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } builder.beginControlFlow("for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++)"); @@ -469,15 +504,11 @@ private MethodSpec addIntermediateInput() { } String intermediateStateRowAccess() { - return intermediateState.stream().map(GroupingAggregatorImplementer::vectorAccess).collect(joining(", ")); - } - - static String vectorAccess(IntermediateStateDesc isd) { - String s = isd.name() + "." + vectorAccessorName(isd.elementType()) + "(groupPosition + positionOffset"; - if (isd.elementType().equals("BYTES_REF")) { - s += ", scratch"; + String rowAccess = intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")); + if (intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block)) { + rowAccess += ", groupPosition + positionOffset"; } - return s + ")"; + return rowAccess; } private void combineStates(MethodSpec.Builder builder) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java new file mode 100644 index 0000000000000..2dc5b441ca00d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -0,0 +1,340 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * A rate grouping aggregation definition for double. 
+ * This class is generated. Edit `X-RateAggregator.java.st` instead. + */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "DOUBLE_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateDoubleAggregator { + + public static DoubleRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new DoubleRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); + } + + public static void combine(DoubleRateGroupingState current, int groupId, long timestamp, double value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + DoubleRateGroupingState current, + int groupId, + LongBlock timestamps, + DoubleBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + DoubleRateGroupingState current, + int currentGroupId, // make the stylecheck happy + DoubleRateGroupingState otherState, + int otherGroupId + ) { + current.combineState(currentGroupId, otherState, otherGroupId); + } + + public static Block evaluateFinal(DoubleRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class DoubleRateState { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(DoubleRateState.class); + final long[] timestamps; // descending order + final double[] values; + double reset = 0; + + DoubleRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new double[initialSize]; + } + + DoubleRateState(long[] ts, double[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private double dv(double v0, double v1) { + // counter reset detection + return v0 > v1 ? 
v1 : v1 - v0; + } + + void append(long t, double v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Double.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; + } + } + + public static final class DoubleRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states + + DoubleRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { + this.bigArrays = bigArrays; + this.breaker = breaker; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + + void append(int groupId, long timestamp, double value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(DoubleRateState.bytesUsed(1)); + state = new DoubleRateState(new long[] { timestamp }, new double[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + adjustBreaker(DoubleRateState.bytesUsed(2)); + state = new DoubleRateState(new long[] { state.timestamps[0], timestamp }, new double[] { state.values[0], value }); + states.set(groupId, state); + adjustBreaker(-DoubleRateState.bytesUsed(1)); // old state + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(DoubleRateState.bytesUsed(valueCount)); + state = new DoubleRateState(valueCount); + state.reset = reset; + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getDouble(firstIndex + i); + } + } else { + adjustBreaker(DoubleRateState.bytesUsed(state.entries() + valueCount)); + var newState = new DoubleRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-DoubleRateState.bytesUsed(state.entries())); // old state + } + } + + void merge(DoubleRateState curr, DoubleRateState dst, int firstIndex, int rightCount, LongBlock timestamps, DoubleBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final 
var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getDouble(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.getDouble(firstIndex + j); + ++k; + ++j; + } + } + + void combineState(int groupId, DoubleRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(DoubleRateState.bytesUsed(len)); + curr = new DoubleRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + DoubleRateState mergeState(DoubleRateState s1, DoubleRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(DoubleRateState.bytesUsed(newLen)); + var dst = new DoubleRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed() + stateBytes; + } + + @Override + public void close() { + Releasables.close(states, () -> adjustBreaker(-stateBytes)); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + DoubleBlock.Builder values = blockFactory.newDoubleBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (double v : state.values) { + values.appendDouble(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java new file mode 100644 index 0000000000000..1ba8b9264c24a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -0,0 +1,341 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * A rate grouping aggregation definition for int. + * This class is generated. Edit `X-RateAggregator.java.st` instead. 
+ */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "INT_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateIntAggregator { + + public static IntRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new IntRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); + } + + public static void combine(IntRateGroupingState current, int groupId, long timestamp, int value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + IntRateGroupingState current, + int groupId, + LongBlock timestamps, + IntBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + IntRateGroupingState current, + int currentGroupId, // make the stylecheck happy + IntRateGroupingState otherState, + int otherGroupId + ) { + current.combineState(currentGroupId, otherState, otherGroupId); + } + + public static Block evaluateFinal(IntRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class IntRateState { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(IntRateState.class); + final long[] timestamps; // descending order + final int[] values; + double reset = 0; + + IntRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new int[initialSize]; + } + + IntRateState(long[] ts, int[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private int dv(int v0, int v1) { + // counter reset detection + return v0 > v1 ? 
v1 : v1 - v0; + } + + void append(long t, int v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; + } + } + + public static final class IntRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states + + IntRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { + this.bigArrays = bigArrays; + this.breaker = breaker; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + + void append(int groupId, long timestamp, int value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(IntRateState.bytesUsed(1)); + state = new IntRateState(new long[] { timestamp }, new int[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + adjustBreaker(IntRateState.bytesUsed(2)); + state = new IntRateState(new long[] { state.timestamps[0], timestamp }, new int[] { state.values[0], value }); + states.set(groupId, state); + adjustBreaker(-IntRateState.bytesUsed(1)); // old state + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(IntRateState.bytesUsed(valueCount)); + state = new IntRateState(valueCount); + state.reset = reset; + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getInt(firstIndex + i); + } + } else { + adjustBreaker(IntRateState.bytesUsed(state.entries() + valueCount)); + var newState = new IntRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-IntRateState.bytesUsed(state.entries())); // old state + } + } + + void merge(IntRateState curr, IntRateState dst, int firstIndex, int rightCount, LongBlock timestamps, IntBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { 
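+ // the existing sample is newer than the incoming one; emit it first to keep descending order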
+ dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getInt(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.getInt(firstIndex + j); + ++k; + ++j; + } + } + + void combineState(int groupId, IntRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(IntRateState.bytesUsed(len)); + curr = new IntRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + IntRateState mergeState(IntRateState s1, IntRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(IntRateState.bytesUsed(newLen)); + var dst = new IntRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed() + stateBytes; + } + + @Override + public void close() { + Releasables.close(states, () -> adjustBreaker(-stateBytes)); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + IntBlock.Builder values = blockFactory.newIntBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (int v : state.values) { + values.appendInt(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java new file mode 100644 index 0000000000000..846c6f0cc2730 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -0,0 +1,340 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * A rate grouping aggregation definition for long. + * This class is generated. Edit `X-RateAggregator.java.st` instead. 
+ */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "LONG_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateLongAggregator { + + public static LongRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new LongRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); + } + + public static void combine(LongRateGroupingState current, int groupId, long timestamp, long value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + LongRateGroupingState current, + int groupId, + LongBlock timestamps, + LongBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + LongRateGroupingState current, + int currentGroupId, // make the stylecheck happy + LongRateGroupingState otherState, + int otherGroupId + ) { + current.combineState(currentGroupId, otherState, otherGroupId); + } + + public static Block evaluateFinal(LongRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class LongRateState { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(LongRateState.class); + final long[] timestamps; // descending order + final long[] values; + double reset = 0; + + LongRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new long[initialSize]; + } + + LongRateState(long[] ts, long[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private long dv(long v0, long v1) { + // counter reset detection + return v0 > v1 ? 
v1 : v1 - v0; + } + + void append(long t, long v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; + } + } + + public static final class LongRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states + + LongRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { + this.bigArrays = bigArrays; + this.breaker = breaker; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + + void append(int groupId, long timestamp, long value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(LongRateState.bytesUsed(1)); + state = new LongRateState(new long[] { timestamp }, new long[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + adjustBreaker(LongRateState.bytesUsed(2)); + state = new LongRateState(new long[] { state.timestamps[0], timestamp }, new long[] { state.values[0], value }); + states.set(groupId, state); + adjustBreaker(-LongRateState.bytesUsed(1)); // old state + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker(LongRateState.bytesUsed(valueCount)); + state = new LongRateState(valueCount); + state.reset = reset; + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getLong(firstIndex + i); + } + } else { + adjustBreaker(LongRateState.bytesUsed(state.entries() + valueCount)); + var newState = new LongRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-LongRateState.bytesUsed(state.entries())); // old state + } + } + + void merge(LongRateState curr, LongRateState dst, int firstIndex, int rightCount, LongBlock timestamps, LongBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); 
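+ // both inputs are sorted by timestamp in descending order; take the newer sample at each step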
+ if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getLong(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.getLong(firstIndex + j); + ++k; + ++j; + } + } + + void combineState(int groupId, LongRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(LongRateState.bytesUsed(len)); + curr = new LongRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + LongRateState mergeState(LongRateState s1, LongRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(LongRateState.bytesUsed(newLen)); + var dst = new LongRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed() + stateBytes; + } + + @Override + public void close() { + Releasables.close(states, () -> adjustBreaker(-stateBytes)); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + LongBlock.Builder values = blockFactory.newLongBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (long v : state.values) { + values.appendLong(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java new file mode 100644 index 0000000000000..736b320a9dde8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * Aggregates field values for BytesRef. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
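+ * <p>
+ * Grouped collection stores each distinct value once in a {@code BytesRefHash} and records
+ * (group, value ordinal) pairs in a {@code LongLongHash}, so repeated values within a group are
+ * deduplicated at collect time.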
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "BYTES_REF_BLOCK") }) +@GroupingAggregator +class ValuesBytesRefAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, BytesRef v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getBytesRef(i, scratch)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, BytesRef v) { + state.values.add(groupId, BlockHash.hashOrdToGroup(state.bytes.add(v))); + } + + public static void combineIntermediate(GroupingState state, int groupId, BytesRefBlock values, int valuesPosition) { + BytesRef scratch = new BytesRef(); + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getBytesRef(i, scratch)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + BytesRef scratch = new BytesRef(); + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); + combine(current, currentGroupId, state.bytes.get(value, scratch)); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final BytesRefHash values; + + private SingleState(BigArrays bigArrays) { + values = new BytesRefHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + BytesRef scratch = new BytesRef(); + if (values.size() == 1) { + return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); + } + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendBytesRef(values.get(id, scratch)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
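+ * Concretely, collecting a value is a single hash insert, while {@code toBlock} rescans every
+ * collected (group, value) entry for each selected group, which is where the quadratic cost lives.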
+ */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + private final BytesRefHash bytes; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + bytes = new BytesRefHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + BytesRef scratch = new BytesRef(); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + long first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendBytesRef(bytes.get(first, scratch)); + builder.appendBytesRef(bytes.get(value, scratch)); + } + default -> builder.appendBytesRef(bytes.get(value, scratch)); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendBytesRef(bytes.get(first, scratch)); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + Releasables.closeExpectNoException(values, bytes); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java new file mode 100644 index 0000000000000..a8409367bc090 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for double. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "DOUBLE_BLOCK") }) +@GroupingAggregator +class ValuesDoubleAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, double v) { + state.values.add(Double.doubleToLongBits(v)); + } + + public static void combineIntermediate(SingleState state, DoubleBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getDouble(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, double v) { + state.values.add(groupId, Double.doubleToLongBits(v)); + } + + public static void combineIntermediate(GroupingState state, int groupId, DoubleBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getDouble(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + double value = Double.longBitsToDouble(state.values.getKey2(id)); + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantDoubleBlockWith(Double.longBitsToDouble(values.get(0)), 1); + } + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendDouble(Double.longBitsToDouble(values.get(id))); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
+ */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + double first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + double value = Double.longBitsToDouble(values.getKey2(id)); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendDouble(first); + builder.appendDouble(value); + } + default -> builder.appendDouble(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendDouble(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java new file mode 100644 index 0000000000000..2420dcee70712 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for int. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
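+ * <p>
+ * The grouping state packs group and value into one long (group in the top 32 bits, value in the
+ * bottom 32). Illustrative example (not from the original patch): groupId 3 with value -1 packs to
+ * {@code 0x3FFFFFFFFL}; {@code (int) (both >>> 32)} and {@code (int) both} recover 3 and -1.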
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "INT_BLOCK") }) +@GroupingAggregator +class ValuesIntAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, int v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, IntBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getInt(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, int v) { + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); + } + + public static void combineIntermediate(GroupingState state, int groupId, IntBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getInt(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + long both = state.values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == statePosition) { + int value = (int) both; + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantIntBlockWith((int) values.get(0), 1); + } + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendInt((int) values.get(id)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
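Editor note: ValuesIntAggregator avoids a two-key hash by packing the group id into the top 32 bits of a long and the int value into the bottom 32 bits, as the comment in combine above describes. A self-contained sketch of that pack/unpack arithmetic (class and method names here are illustrative, not part of the ESQL code):

```java
public class GroupValuePacking {
    // Pack a group id (top 32 bits) and an int value (bottom 32 bits) into one long,
    // exactly the expression ValuesIntAggregator.combine adds to its LongHash.
    static long pack(int groupId, int value) {
        return (((long) groupId) << Integer.SIZE) | (value & 0xFFFFFFFFL);
    }

    // Recover the group id: the unsigned shift keeps negative values from leaking into the group.
    static int group(long packed) {
        return (int) (packed >>> Integer.SIZE);
    }

    // Recover the value: the narrowing cast keeps only the bottom 32 bits.
    static int value(long packed) {
        return (int) packed;
    }

    public static void main(String[] args) {
        long packed = pack(7, -42);
        System.out.println(group(packed)); // 7
        System.out.println(value(packed)); // -42
    }
}
```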
+ */ + public static class GroupingState implements Releasable { + private final LongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + int first = 0; + for (int id = 0; id < values.size(); id++) { + long both = values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == selectedGroup) { + int value = (int) both; + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendInt(first); + builder.appendInt(value); + } + default -> builder.appendInt(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendInt(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java new file mode 100644 index 0000000000000..4938b8f15edb0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for long. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "LONG_BLOCK") }) +@GroupingAggregator +class ValuesLongAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, long v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, LongBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getLong(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, long v) { + state.values.add(groupId, v); + } + + public static void combineIntermediate(GroupingState state, int groupId, LongBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getLong(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantLongBlockWith(values.get(0), 1); + } + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendLong(values.get(id)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
+ */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + long first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendLong(first); + builder.appendLong(value); + } + default -> builder.appendLong(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendLong(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index eb618f4569ce7..a12677e70e8a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private void addRawInput(int positionOffset, IntBlock groups, BooleanVector valu public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); - BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + Block fbitUncast = page.getBlock(channels.get(0)); + if (fbitUncast.areAllValuesNull()) { + return; + } + BooleanVector fbit = ((BooleanBlock) fbitUncast).asVector(); + Block tbitUncast = page.getBlock(channels.get(1)); + if (tbitUncast.areAllValuesNull()) { + return; + } + BooleanVector tbit = ((BooleanBlock) tbitUncast).asVector(); assert fbit.getPositionCount() == tbit.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java 
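Editor note: the recurring change to the generated addIntermediateInput methods, first visible in CountDistinctBooleanGroupingAggregatorFunction above, is a guard that returns early when an intermediate channel carries an all-null block instead of immediately unwrapping it as a vector. A toy sketch of that pattern, using tiny stand-in types rather than the real org.elasticsearch.compute.data classes:

```java
import java.util.Optional;

public class IntermediateNullGuard {
    // Minimal stand-ins for the compute engine types; the real Block, BooleanBlock and
    // BooleanVector carry much more API than this.
    interface Block { boolean areAllValuesNull(); }
    record BooleanVector(boolean[] values) {}
    record BooleanBlock(BooleanVector vector) implements Block {
        public boolean areAllValuesNull() { return false; }
        BooleanVector asVector() { return vector; }
    }
    record ConstantNullBlock(int positions) implements Block {
        public boolean areAllValuesNull() { return true; }
    }

    // The guard pattern used by the regenerated addIntermediateInput methods: an all-null
    // block means this channel carries no intermediate state, so skip it instead of casting.
    static Optional<BooleanVector> unwrap(Block uncast) {
        if (uncast.areAllValuesNull()) {
            return Optional.empty();                      // the generated code simply returns here
        }
        return Optional.of(((BooleanBlock) uncast).asVector());
    }

    public static void main(String[] args) {
        System.out.println(unwrap(new ConstantNullBlock(3)).isPresent());                                     // false
        System.out.println(unwrap(new BooleanBlock(new BooleanVector(new boolean[] { true }))).isPresent());  // true
    }
}
```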
index ba2eaf66bf2af..4879df5cf1c2c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -155,7 +155,11 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index 2bb273bf2598c..1e0ce58377f9e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index 6e1017d962254..99e6ace52b256 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -151,7 +151,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; 
groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index d0c6cedeed2ff..85f823296c886 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index 86ada78737512..da93320eaf96e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + DoubleVector max = ((DoubleBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index d0809b2a6853c..c8b1b6910c0aa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private 
void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + IntVector max = ((IntBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index 5b28fa01ef0ce..41d893f9bbf0c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + LongVector max = ((LongBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index 39d65eabbe4b7..e08488685d2cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -150,7 +150,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + 
return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index ec7b21fc440a1..02866ee15b961 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -148,7 +148,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 4028bac4628a1..36c40e10e54d5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -150,7 +150,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index 4d0e78a26865d..7d0374b3d21f7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java 
@@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + return; + } + DoubleVector min = ((DoubleBlock) minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index 97b5eafa9e72e..6625fd327237b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + return; + } + IntVector min = ((IntBlock) minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index 7e68a4b933841..f0c3727d7db0b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + 
return; + } + LongVector min = ((LongBlock) minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index e8c9fe4728308..9d486b9614dab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index fb4e06784823d..8c2bd7091143f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -151,7 +151,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index 45ce7d0d1c267..c1c332ba0094d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..8806e1ed865c2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link RateDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateDoubleAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateDoubleGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..8d9e011891e95 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
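Editor note: RateDoubleAggregatorFunctionSupplier above only produces the grouping form and throws for the non-grouping one. A stripped-down sketch of that supplier shape, with stand-in types; the real factory also takes a DriverContext, omitted here to keep the example self-contained, and the concrete channel numbers and unit below are purely illustrative:

```java
import java.util.List;

public class RateSupplierSketch {
    interface GroupingAggregatorSketch {}

    record RateGroupingAggregatorSketch(List<Integer> channels, long unitInMillis) implements GroupingAggregatorSketch {}

    // Mirrors the shape of RateDoubleAggregatorFunctionSupplier: grouping only.
    record RateSupplier(List<Integer> channels, long unitInMillis) {
        GroupingAggregatorSketch groupingAggregator() {
            return new RateGroupingAggregatorSketch(channels, unitInMillis);
        }

        GroupingAggregatorSketch aggregator() {
            // Same behavior as the generated aggregator(DriverContext) method above.
            throw new UnsupportedOperationException("non-grouping aggregator is not supported");
        }
    }

    public static void main(String[] args) {
        // prepareProcessPage reads values from channels.get(0) and @timestamp from channels.get(1);
        // the unit value 1000L is only an example.
        RateSupplier supplier = new RateSupplier(List.of(0, 1), 1000L);
        System.out.println(supplier.groupingAggregator());
    }
}
```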
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link RateDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.DOUBLE), + new IntermediateStateDesc("resets", ElementType.DOUBLE) ); + + private final RateDoubleAggregator.DoubleRateGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final long unitInMillis; + + public RateDoubleGroupingAggregatorFunction(List channels, + RateDoubleAggregator.DoubleRateGroupingState state, DriverContext driverContext, + long unitInMillis) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.unitInMillis = unitInMillis; + } + + public static RateDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext, long unitInMillis) { + return new RateDoubleGroupingAggregatorFunction(channels, RateDoubleAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); + DoubleVector valuesVector = valuesBlock.asVector(); + LongBlock timestampsBlock = page.getBlock(channels.get(1)); + LongVector timestampsVector = timestampsBlock.asVector(); + if (timestampsVector == null) { + throw new IllegalStateException("expected @timestamp vector; but got a block"); + } + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values, + LongVector 
timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateDoubleAggregator.combine(state, groupId, timestamps.getLong(v), values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + var valuePosition = groupPosition + positionOffset; + RateDoubleAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getDouble(valuePosition)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateDoubleAggregator.combine(state, groupId, timestamps.getLong(v), values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + var valuePosition = groupPosition + positionOffset; + RateDoubleAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getDouble(valuePosition)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block resetsUncast = page.getBlock(channels.get(2)); + if (resetsUncast.areAllValuesNull()) { + return; + } + DoubleVector resets = ((DoubleBlock) resetsUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = 
Math.toIntExact(groups.getInt(groupPosition)); + RateDoubleAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + RateDoubleAggregator.DoubleRateGroupingState inState = ((RateDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + RateDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = RateDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..a98f0217ef90e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link RateIntAggregator}. + * This class is generated. Do not edit it. 
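Editor note: the rate intermediate state carries timestamps, values and resets per group. As a rough, assumed illustration of how such pieces commonly combine into a counter rate - this is a generic sketch, not a transcription of RateDoubleAggregator, whose source is not part of this diff - the arithmetic looks roughly like:

```java
public class CounterRateSketch {
    // values are samples of a monotonically increasing counter; resets accumulates the value
    // lost whenever the counter was observed to go backwards (e.g. after a process restart).
    // Timestamps are assumed ascending here purely for readability.
    static double rate(long[] timestampsMillis, double[] values, double resets, long unitInMillis) {
        int last = values.length - 1;
        double delta = values[last] - values[0] + resets;              // total increase, reset-compensated
        long elapsedMillis = timestampsMillis[last] - timestampsMillis[0];
        return elapsedMillis == 0 ? 0 : delta * unitInMillis / elapsedMillis;  // increase per unit of time
    }

    public static void main(String[] args) {
        long[] ts = { 0, 60_000, 120_000 };
        double[] vs = { 10, 70, 130 };
        System.out.println(rate(ts, vs, 0, 1000)); // 1.0 per second: the counter grows by 120 over 120s
    }
}
```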
+ */ +public final class RateIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateIntAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateIntGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..6bd4b833dc9e6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link RateIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class RateIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.INT), + new IntermediateStateDesc("resets", ElementType.DOUBLE) ); + + private final RateIntAggregator.IntRateGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final long unitInMillis; + + public RateIntGroupingAggregatorFunction(List channels, + RateIntAggregator.IntRateGroupingState state, DriverContext driverContext, + long unitInMillis) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.unitInMillis = unitInMillis; + } + + public static RateIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, long unitInMillis) { + return new RateIntGroupingAggregatorFunction(channels, RateIntAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + LongBlock timestampsBlock = page.getBlock(channels.get(1)); + LongVector timestampsVector = timestampsBlock.asVector(); + if (timestampsVector == null) { + throw new IllegalStateException("expected @timestamp vector; but got a block"); + } + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateIntAggregator.combine(state, groupId, timestamps.getLong(v), values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + var valuePosition = groupPosition + positionOffset; + 
RateIntAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getInt(valuePosition)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateIntAggregator.combine(state, groupId, timestamps.getLong(v), values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + var valuePosition = groupPosition + positionOffset; + RateIntAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getInt(valuePosition)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + Block resetsUncast = page.getBlock(channels.get(2)); + if (resetsUncast.areAllValuesNull()) { + return; + } + DoubleVector resets = ((DoubleBlock) resetsUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + RateIntAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + RateIntAggregator.IntRateGroupingState inState = ((RateIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + RateIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector 
selected, + DriverContext driverContext) { + blocks[offset] = RateIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..b8100dbbe4455 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link RateLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateLongAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateLongGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..27318d6496737 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
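Editor note: each generated rate function's prepareProcessPage (see the int variant above and the long variant that follows) picks between a dense vector path and a null-tolerant block path for the values channel. A small stand-in sketch of that dispatch, with toy types in place of DoubleBlock/DoubleVector:

```java
public class BlockOrVectorDispatch {
    // Tiny stand-ins for DoubleVector/DoubleBlock; the real types live in org.elasticsearch.compute.data.
    interface DoubleVectorSketch { double get(int position); }
    record DoubleBlockSketch(DoubleVectorSketch vector, boolean mayHaveNulls) {
        DoubleVectorSketch asVector() { return vector; } // null when multi-valued or nullable
    }

    // Mirrors the shape of prepareProcessPage: prefer the dense vector path, fall back to the
    // block path (and remember to track seen groups) when values may contain nulls.
    static String choosePath(DoubleBlockSketch values) {
        DoubleVectorSketch vector = values.asVector();
        if (vector == null) {
            if (values.mayHaveNulls()) {
                // the generated code calls state.enableGroupIdTracking(seenGroupIds) at this point
            }
            return "block path: per-position null checks and value-count loops";
        }
        return "vector path: exactly one value per position";
    }

    public static void main(String[] args) {
        System.out.println(choosePath(new DoubleBlockSketch(position -> 1.0, false)));
        System.out.println(choosePath(new DoubleBlockSketch(null, true)));
    }
}
```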
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link RateLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.LONG), + new IntermediateStateDesc("resets", ElementType.DOUBLE) ); + + private final RateLongAggregator.LongRateGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final long unitInMillis; + + public RateLongGroupingAggregatorFunction(List channels, + RateLongAggregator.LongRateGroupingState state, DriverContext driverContext, + long unitInMillis) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.unitInMillis = unitInMillis; + } + + public static RateLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, long unitInMillis) { + return new RateLongGroupingAggregatorFunction(channels, RateLongAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + LongBlock timestampsBlock = page.getBlock(channels.get(1)); + LongVector timestampsVector = timestampsBlock.asVector(); + if (timestampsVector == null) { + throw new IllegalStateException("expected @timestamp vector; but got a block"); + } + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values, + LongVector timestamps) { + for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateLongAggregator.combine(state, groupId, timestamps.getLong(v), values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + var valuePosition = groupPosition + positionOffset; + RateLongAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getLong(valuePosition)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateLongAggregator.combine(state, groupId, timestamps.getLong(v), values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + var valuePosition = groupPosition + positionOffset; + RateLongAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getLong(valuePosition)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + Block resetsUncast = page.getBlock(channels.get(2)); + if (resetsUncast.areAllValuesNull()) { + return; + } + DoubleVector resets = ((DoubleBlock) resetsUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + 
RateLongAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + RateLongAggregator.LongRateGroupingState inState = ((RateLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + RateLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = RateLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java index f60a3c8cf152a..5085cfc3bebcf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -151,9 +151,21 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + Block valueUncast = page.getBlock(channels.get(0)); + if (valueUncast.areAllValuesNull()) { + return; + } + DoubleVector value = ((DoubleBlock) valueUncast).asVector(); + Block deltaUncast = page.getBlock(channels.get(1)); + if (deltaUncast.areAllValuesNull()) { + return; + } + DoubleVector delta = ((DoubleBlock) deltaUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java index 373a7af4b3d67..6891fe548908f 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { + return; + } + LongVector sum = ((LongBlock) sumUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java index 46e993b49c666..507aa343aa74e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { + return; + } + LongVector sum = ((LongBlock) sumUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java new file mode 100644 index 0000000000000..3e9bc91e0039a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation;
+
+import java.lang.Integer;
+import java.lang.Override;
+import java.lang.String;
+import java.lang.StringBuilder;
+import java.util.List;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.BooleanVector;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+
+/**
+ * {@link AggregatorFunction} implementation for {@link ValuesBooleanAggregator}.
+ * This class is generated. Do not edit it.
+ */
+public final class ValuesBooleanAggregatorFunction implements AggregatorFunction {
+  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
+      new IntermediateStateDesc("values", ElementType.BOOLEAN) );
+
+  private final DriverContext driverContext;
+
+  private final ValuesBooleanAggregator.SingleState state;
+
+  private final List<Integer> channels;
+
+  public ValuesBooleanAggregatorFunction(DriverContext driverContext, List<Integer> channels,
+      ValuesBooleanAggregator.SingleState state) {
+    this.driverContext = driverContext;
+    this.channels = channels;
+    this.state = state;
+  }
+
+  public static ValuesBooleanAggregatorFunction create(DriverContext driverContext,
+      List<Integer> channels) {
+    return new ValuesBooleanAggregatorFunction(driverContext, channels, ValuesBooleanAggregator.initSingle());
+  }
+
+  public static List<IntermediateStateDesc> intermediateStateDesc() {
+    return INTERMEDIATE_STATE_DESC;
+  }
+
+  @Override
+  public int intermediateBlockCount() {
+    return INTERMEDIATE_STATE_DESC.size();
+  }
+
+  @Override
+  public void addRawInput(Page page) {
+    BooleanBlock block = page.getBlock(channels.get(0));
+    BooleanVector vector = block.asVector();
+    if (vector != null) {
+      addRawVector(vector);
+    } else {
+      addRawBlock(block);
+    }
+  }
+
+  private void addRawVector(BooleanVector vector) {
+    for (int i = 0; i < vector.getPositionCount(); i++) {
+      ValuesBooleanAggregator.combine(state, vector.getBoolean(i));
+    }
+  }
+
+  private void addRawBlock(BooleanBlock block) {
+    for (int p = 0; p < block.getPositionCount(); p++) {
+      if (block.isNull(p)) {
+        continue;
+      }
+      int start = block.getFirstValueIndex(p);
+      int end = start + block.getValueCount(p);
+      for (int i = start; i < end; i++) {
+        ValuesBooleanAggregator.combine(state, block.getBoolean(i));
+      }
+    }
+  }
+
+  @Override
+  public void addIntermediateInput(Page page) {
+    assert channels.size() == intermediateBlockCount();
+    assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
+    Block valuesUncast = page.getBlock(channels.get(0));
+    if (valuesUncast.areAllValuesNull()) {
+      return;
+    }
+    BooleanBlock values = (BooleanBlock) valuesUncast;
+    assert values.getPositionCount() == 1;
+    ValuesBooleanAggregator.combineIntermediate(state, values);
+  }
+
+  @Override
+  public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
+    state.toIntermediate(blocks, offset, driverContext);
+  }
+
+  @Override
+  public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
+    blocks[offset] = ValuesBooleanAggregator.evaluateFinal(state, driverContext);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append(getClass().getSimpleName()).append("[");
+    sb.append("channels=").append(channels);
+    sb.append("]");
+    return sb.toString();
+  }
+
+  @Override
+  public void close() {
+    state.close();
+  }
+}
diff --git
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..20d5a5fda7726 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBooleanAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesBooleanAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesBooleanAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesBooleanGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of booleans"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..16e92a7c69ca8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesBooleanAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BOOLEAN) ); + + private final ValuesBooleanAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesBooleanGroupingAggregatorFunction(List channels, + ValuesBooleanAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesBooleanGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesBooleanGroupingAggregatorFunction(channels, ValuesBooleanAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BooleanBlock valuesBlock = page.getBlock(channels.get(0)); + BooleanVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + 
continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock values = (BooleanBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBooleanAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesBooleanAggregator.GroupingState inState = ((ValuesBooleanGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesBooleanAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesBooleanAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java new file mode 100644 index 0000000000000..24b88f59e38f4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBytesRefAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final ValuesBytesRefAggregator.SingleState state; + + private final List channels; + + public ValuesBytesRefAggregatorFunction(DriverContext driverContext, List channels, + ValuesBytesRefAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesBytesRefAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesBytesRefAggregatorFunction(driverContext, channels, ValuesBytesRefAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + assert values.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + ValuesBytesRefAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesBytesRefAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + 
sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..31de817edf868 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesBytesRefAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesBytesRefAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesBytesRefGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of bytes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..f9a51fcc52221 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java @@ -0,0 +1,201 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BYTES_REF) ); + + private final ValuesBytesRefAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesBytesRefGroupingAggregatorFunction(List channels, + ValuesBytesRefAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesBytesRefGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesBytesRefGroupingAggregatorFunction(channels, ValuesBytesRefAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = 
groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBytesRefAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesBytesRefAggregator.GroupingState inState = ((ValuesBytesRefGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesBytesRefAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesBytesRefAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..a6295038dbd7a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.DOUBLE) ); + + private final DriverContext driverContext; + + private final ValuesDoubleAggregator.SingleState state; + + private final List channels; + + public ValuesDoubleAggregatorFunction(DriverContext driverContext, List channels, + ValuesDoubleAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesDoubleAggregatorFunction(driverContext, channels, ValuesDoubleAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesDoubleAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return 
sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..049deda37c460 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesDoubleAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..11a0eb96c6a8e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.DOUBLE) ); + + private final ValuesDoubleAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesDoubleGroupingAggregatorFunction(List channels, + ValuesDoubleAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesDoubleGroupingAggregatorFunction(channels, ValuesDoubleAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int 
valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesDoubleAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesDoubleAggregator.GroupingState inState = ((ValuesDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java new file mode 100644 index 0000000000000..19e578936cd14 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation;
+
+import java.lang.Integer;
+import java.lang.Override;
+import java.lang.String;
+import java.lang.StringBuilder;
+import java.util.List;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.IntVector;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+
+/**
+ * {@link AggregatorFunction} implementation for {@link ValuesIntAggregator}.
+ * This class is generated. Do not edit it.
+ */
+public final class ValuesIntAggregatorFunction implements AggregatorFunction {
+  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
+      new IntermediateStateDesc("values", ElementType.INT) );
+
+  private final DriverContext driverContext;
+
+  private final ValuesIntAggregator.SingleState state;
+
+  private final List<Integer> channels;
+
+  public ValuesIntAggregatorFunction(DriverContext driverContext, List<Integer> channels,
+      ValuesIntAggregator.SingleState state) {
+    this.driverContext = driverContext;
+    this.channels = channels;
+    this.state = state;
+  }
+
+  public static ValuesIntAggregatorFunction create(DriverContext driverContext,
+      List<Integer> channels) {
+    return new ValuesIntAggregatorFunction(driverContext, channels, ValuesIntAggregator.initSingle(driverContext.bigArrays()));
+  }
+
+  public static List<IntermediateStateDesc> intermediateStateDesc() {
+    return INTERMEDIATE_STATE_DESC;
+  }
+
+  @Override
+  public int intermediateBlockCount() {
+    return INTERMEDIATE_STATE_DESC.size();
+  }
+
+  @Override
+  public void addRawInput(Page page) {
+    IntBlock block = page.getBlock(channels.get(0));
+    IntVector vector = block.asVector();
+    if (vector != null) {
+      addRawVector(vector);
+    } else {
+      addRawBlock(block);
+    }
+  }
+
+  private void addRawVector(IntVector vector) {
+    for (int i = 0; i < vector.getPositionCount(); i++) {
+      ValuesIntAggregator.combine(state, vector.getInt(i));
+    }
+  }
+
+  private void addRawBlock(IntBlock block) {
+    for (int p = 0; p < block.getPositionCount(); p++) {
+      if (block.isNull(p)) {
+        continue;
+      }
+      int start = block.getFirstValueIndex(p);
+      int end = start + block.getValueCount(p);
+      for (int i = start; i < end; i++) {
+        ValuesIntAggregator.combine(state, block.getInt(i));
+      }
+    }
+  }
+
+  @Override
+  public void addIntermediateInput(Page page) {
+    assert channels.size() == intermediateBlockCount();
+    assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
+    Block valuesUncast = page.getBlock(channels.get(0));
+    if (valuesUncast.areAllValuesNull()) {
+      return;
+    }
+    IntBlock values = (IntBlock) valuesUncast;
+    assert values.getPositionCount() == 1;
+    ValuesIntAggregator.combineIntermediate(state, values);
+  }
+
+  @Override
+  public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
+    state.toIntermediate(blocks, offset, driverContext);
+  }
+
+  @Override
+  public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
+    blocks[offset] = ValuesIntAggregator.evaluateFinal(state, driverContext);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append(getClass().getSimpleName()).append("[");
+    sb.append("channels=").append(channels);
+    sb.append("]");
+    return sb.toString();
+  }
+
+  @Override
+  public void close() {
+    state.close();
+  }
+}
diff --git
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..21402b5913813 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesIntAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesIntAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..67722cd1318c0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java @@ -0,0 +1,193 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.INT) ); + + private final ValuesIntAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesIntGroupingAggregatorFunction(List channels, + ValuesIntAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesIntGroupingAggregatorFunction(channels, ValuesIntAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + 
positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesIntAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesIntAggregator.GroupingState inState = ((ValuesIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java new file mode 100644 index 0000000000000..420da87076a37 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
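The grouping variants above repeat essentially the same loop four times because both the group ids and the values can each arrive either as a dense vector (exactly one non-null value per position) or as a block (possibly null, possibly multi-valued), and prepareProcessPage picks the matching specialization. A standalone sketch of the two extreme cases, using hypothetical stand-in types rather than the real Vector/Block classes:

import java.util.List;
import java.util.function.BiConsumer;

final class DispatchSketch {
    // A "vector" is dense: exactly one non-null value per position.
    record IntVec(int[] values) {
        int get(int position) { return values[position]; }
        int positionCount() { return values.length; }
    }
    // A "block" may hold nulls and multiple values per position.
    record IntBlk(List<int[]> rows) {
        boolean isNull(int position) { return rows.get(position) == null; }
        int[] values(int position) { return rows.get(position); }
        int positionCount() { return rows.size(); }
    }

    // Fast path: both groups and values are dense vectors.
    static void addRaw(IntVec groups, IntVec values, BiConsumer<Integer, Integer> combine) {
        for (int p = 0; p < groups.positionCount(); p++) {
            combine.accept(groups.get(p), values.get(p));
        }
    }

    // Slow path: either side may be null or multi-valued, so every combination is visited.
    static void addRaw(IntBlk groups, IntBlk values, BiConsumer<Integer, Integer> combine) {
        for (int p = 0; p < groups.positionCount(); p++) {
            if (groups.isNull(p) || values.isNull(p)) {
                continue;
            }
            for (int g : groups.values(p)) {
                for (int v : values.values(p)) {
                    combine.accept(g, v);
                }
            }
        }
    }

    public static void main(String[] args) {
        addRaw(new IntVec(new int[] { 0, 1 }), new IntVec(new int[] { 10, 20 }),
            (g, v) -> System.out.println("group " + g + " <- " + v));
    }
}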
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final ValuesLongAggregator.SingleState state; + + private final List channels; + + public ValuesLongAggregatorFunction(DriverContext driverContext, List channels, + ValuesLongAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesLongAggregatorFunction(driverContext, channels, ValuesLongAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesLongAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..a025bd0ade17a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesLongAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesLongAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..06508ce360ba4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.LONG) ); + + private final ValuesLongAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesLongGroupingAggregatorFunction(List channels, + ValuesLongAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesLongGroupingAggregatorFunction(channels, ValuesLongAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = 
values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesLongAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesLongAggregator.GroupingState inState = ((ValuesLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java index de35965f52575..795207b245023 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java @@ -156,11 +156,31 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java index 86b2f15187af6..12c0f24ef43e3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -163,11 +163,31 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if 
(yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java index 0ccff1a1463ac..2447939d56db9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java @@ -156,11 +156,31 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java index 30ef738669914..075f8749503b8 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java @@ -163,11 +163,31 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java new file mode 100644 index 0000000000000..252436ad9634f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * Aggregates field values for booleans. 
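The grouping state below needs only two bits per group: bit 2*groupId records that false was seen and bit 2*groupId + 1 that true was seen. A minimal sketch of the same encoding on top of java.util.BitSet instead of the BigArrays-backed BitArray (illustration only):

import java.util.BitSet;

final class BooleanValuesSketch {
    private final BitSet seen = new BitSet();

    void combine(int groupId, boolean v) {
        // bit 2*groupId means "saw false"; bit 2*groupId + 1 means "saw true"
        seen.set(2 * groupId + (v ? 1 : 0));
    }

    String render(int groupId) {
        boolean sawFalse = seen.get(2 * groupId);
        boolean sawTrue = seen.get(2 * groupId + 1);
        if (sawFalse == false && sawTrue == false) return "null";
        if (sawFalse && sawTrue) return "[false, true]";
        return sawFalse ? "false" : "true";
    }

    public static void main(String[] args) {
        BooleanValuesSketch s = new BooleanValuesSketch();
        s.combine(0, true);
        s.combine(0, false);
        s.combine(1, true);
        System.out.println(s.render(0)); // [false, true]
        System.out.println(s.render(1)); // true
        System.out.println(s.render(2)); // null
    }
}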
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "BOOLEAN_BLOCK") }) +@GroupingAggregator +class ValuesBooleanAggregator { + public static SingleState initSingle() { + return new SingleState(); + } + + public static void combine(SingleState state, boolean v) { + if (v) { + state.seenTrue = true; + } else { + state.seenFalse = true; + } + } + + public static void combineIntermediate(SingleState state, BooleanBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getBoolean(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, boolean v) { + long index = ((long) groupId) << 1 | (v ? 1 : 0); + state.values.set(index); + } + + public static void combineIntermediate(GroupingState state, int groupId, BooleanBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getBoolean(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + long stateOffset = ((long) statePosition) << 1; + boolean seenFalse = state.values.get(stateOffset); + boolean seenTrue = state.values.get(stateOffset | 1); + + if (seenFalse) { + combine(current, currentGroupId, false); + } + if (seenTrue) { + combine(current, currentGroupId, true); + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private boolean seenFalse; + private boolean seenTrue; + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (seenFalse == false && seenTrue == false) { + return blockFactory.newConstantNullBlock(1); + } + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(2)) { + builder.beginPositionEntry(); + if (seenFalse) { + builder.appendBoolean(false); + } + if (seenTrue) { + builder.appendBoolean(true); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() {} + } + + public static class GroupingState implements Releasable { + private final BitArray values; + + private GroupingState(BigArrays bigArrays) { + values = new BitArray(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + long index = ((long) selectedGroup) << 1; + boolean seenFalse = values.get(index); + boolean seenTrue = 
values.get(index | 1); + if (seenFalse) { + if (seenTrue) { + builder.beginPositionEntry(); + builder.appendBoolean(false); + builder.appendBoolean(true); + builder.endPositionEntry(); + } else { + builder.appendBoolean(false); + } + } else { + if (seenTrue) { + builder.appendBoolean(true); + } else { + builder.appendNull(); + } + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we don't need to track which values have been seen because we don't do anything special for groups without values + } + + @Override + public void close() { + Releasables.closeExpectNoException(values); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st new file mode 100644 index 0000000000000..ad305809c6651 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -0,0 +1,343 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +$if(int)$ +import org.elasticsearch.compute.data.IntBlock; +$endif$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * A rate grouping aggregation definition for $type$. + * This class is generated. Edit `X-RateAggregator.java.st` instead. 
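The template below keeps, per group, descending (timestamp, value) samples plus an accumulated reset correction, so a counter that wraps back toward zero still yields a positive rate. A small standalone sketch of that reset handling, written over samples in ascending time order for readability; the values and helper name here are illustrative assumptions, not part of the generated state.

final class RateSketch {
    // Reset-adjusted increase of a monotonically increasing counter.
    static double increase(double[] samples) {
        double increase = 0;
        for (int i = 1; i < samples.length; i++) {
            double prev = samples[i - 1];
            double curr = samples[i];
            // A drop means the counter restarted; count the new value wholesale.
            increase += curr < prev ? curr : curr - prev;
        }
        return increase;
    }

    public static void main(String[] args) {
        // 5 -> 9 -> (reset) 2 -> 6 over 30ms: increase = 4 + 2 + 4 = 10
        double[] samples = { 5, 9, 2, 6 };
        double elapsedMillis = 30;
        double unitInMillis = 1000; // per-second rate
        System.out.println(increase(samples) * unitInMillis / elapsedMillis); // ~333.3
    }
}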
+ */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "$TYPE$_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class Rate$Type$Aggregator { + + public static $Type$RateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new $Type$RateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); + } + + public static void combine($Type$RateGroupingState current, int groupId, long timestamp, $type$ value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + $Type$RateGroupingState current, + int groupId, + LongBlock timestamps, + $Type$Block values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + $Type$RateGroupingState current, + int currentGroupId, // make the stylecheck happy + $Type$RateGroupingState otherState, + int otherGroupId + ) { + current.combineState(currentGroupId, otherState, otherGroupId); + } + + public static Block evaluateFinal($Type$RateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class $Type$RateState { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject($Type$RateState.class); + final long[] timestamps; // descending order + final $type$[] values; + double reset = 0; + + $Type$RateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new $type$[initialSize]; + } + + $Type$RateState(long[] ts, $type$[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private $type$ dv($type$ v0, $type$ v1) { + // counter reset detection + return v0 > v1 ? 
v1 : v1 - v0; + } + + void append(long t, $type$ v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) $BYTES$ * entries); + return BASE_RAM_USAGE + ts + vs; + } + } + + public static final class $Type$RateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray<$Type$RateState> states; + private final long unitInMillis; + private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states + + $Type$RateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { + this.bigArrays = bigArrays; + this.breaker = breaker; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + + void append(int groupId, long timestamp, $type$ value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker($Type$RateState.bytesUsed(1)); + state = new $Type$RateState(new long[] { timestamp }, new $type$[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + adjustBreaker($Type$RateState.bytesUsed(2)); + state = new $Type$RateState(new long[] { state.timestamps[0], timestamp }, new $type$[] { state.values[0], value }); + states.set(groupId, state); + adjustBreaker(-$Type$RateState.bytesUsed(1)); // old state + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, $Type$Block values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + adjustBreaker($Type$RateState.bytesUsed(valueCount)); + state = new $Type$RateState(valueCount); + state.reset = reset; + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.get$Type$(firstIndex + i); + } + } else { + adjustBreaker($Type$RateState.bytesUsed(state.entries() + valueCount)); + var newState = new $Type$RateState(state.entries() + valueCount); + newState.reset = state.reset + reset; + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-$Type$RateState.bytesUsed(state.entries())); // old state + } + } + + void merge($Type$RateState curr, $Type$RateState dst, int firstIndex, int rightCount, LongBlock timestamps, $Type$Block values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = 
curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.get$Type$(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.get$Type$(firstIndex + j); + ++k; + ++j; + } + } + + void combineState(int groupId, $Type$RateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker($Type$RateState.bytesUsed(len)); + curr = new $Type$RateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + $Type$RateState mergeState($Type$RateState s1, $Type$RateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker($Type$RateState.bytesUsed(newLen)); + var dst = new $Type$RateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed() + stateBytes; + } + + @Override + public void close() { + Releasables.close(states, () -> adjustBreaker(-stateBytes)); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + $Type$Block.Builder values = blockFactory.new$Type$BlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for ($type$ v : state.values) { + values.append$Type$(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st new file mode 100644 index 0000000000000..f9b15ccd34092 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -0,0 +1,306 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.elasticsearch.common.util.BigArrays; +$if(BytesRef)$ +import org.elasticsearch.common.util.BytesRefHash; +$else$ +import org.elasticsearch.common.util.LongHash; +$endif$ +$if(long||double||BytesRef)$ +import org.elasticsearch.common.util.LongLongHash; +$endif$ +$if(BytesRef)$ +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +$endif$ +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(int||double||BytesRef)$ +import org.elasticsearch.compute.data.$Type$Block; +$endif$ +import org.elasticsearch.compute.data.IntVector; +$if(long)$ +import org.elasticsearch.compute.data.LongBlock; +$endif$ +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +$if(BytesRef)$ +import org.elasticsearch.core.Releasables; + +$else$ + +$endif$ +/** + * Aggregates field values for $type$. 
+ * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. + */ +@Aggregator({ @IntermediateState(name = "values", type = "$TYPE$_BLOCK") }) +@GroupingAggregator +class Values$Type$Aggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, $type$ v) { +$if(double)$ + state.values.add(Double.doubleToLongBits(v)); +$else$ + state.values.add(v); +$endif$ + } + + public static void combineIntermediate(SingleState state, $Type$Block values) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { +$if(BytesRef)$ + combine(state, values.getBytesRef(i, scratch)); +$else$ + combine(state, values.get$Type$(i)); +$endif$ + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, $type$ v) { +$if(long)$ + state.values.add(groupId, v); +$elseif(double)$ + state.values.add(groupId, Double.doubleToLongBits(v)); +$elseif(BytesRef)$ + state.values.add(groupId, BlockHash.hashOrdToGroup(state.bytes.add(v))); +$elseif(int)$ + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. + */ + state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); +$endif$ + } + + public static void combineIntermediate(GroupingState state, int groupId, $Type$Block values, int valuesPosition) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { +$if(BytesRef)$ + combine(state, groupId, values.getBytesRef(i, scratch)); +$else$ + combine(state, groupId, values.get$Type$(i)); +$endif$ + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int id = 0; id < state.values.size(); id++) { +$if(long||BytesRef)$ + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); +$elseif(double)$ + if (state.values.getKey1(id) == statePosition) { + double value = Double.longBitsToDouble(state.values.getKey2(id)); +$elseif(int)$ + long both = state.values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == statePosition) { + int value = (int) both; +$endif$ + combine(current, currentGroupId, $if(BytesRef)$state.bytes.get(value, scratch)$else$value$endif$); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { +$if(BytesRef)$ + private final BytesRefHash values; + +$else$ + private final LongHash values; + +$endif$ + private SingleState(BigArrays bigArrays) { +$if(BytesRef)$ + values = new BytesRefHash(1, bigArrays); +$else$ + values = new LongHash(1, bigArrays); +$endif$ + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = 
toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + if (values.size() == 1) { +$if(long)$ + return blockFactory.newConstantLongBlockWith(values.get(0), 1); +$elseif(double)$ + return blockFactory.newConstantDoubleBlockWith(Double.longBitsToDouble(values.get(0)), 1); +$elseif(int)$ + return blockFactory.newConstantIntBlockWith((int) values.get(0), 1); +$elseif(BytesRef)$ + return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); +$endif$ + } + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { +$if(long)$ + builder.appendLong(values.get(id)); +$elseif(double)$ + builder.appendDouble(Double.longBitsToDouble(values.get(id))); +$elseif(int)$ + builder.appendInt((int) values.get(id)); +$elseif(BytesRef)$ + builder.appendBytesRef(values.get(id, scratch)); +$endif$ + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. + */ + public static class GroupingState implements Releasable { +$if(long||double)$ + private final LongLongHash values; + +$elseif(BytesRef)$ + private final LongLongHash values; + private final BytesRefHash bytes; + +$elseif(int)$ + private final LongHash values; + +$endif$ + private GroupingState(BigArrays bigArrays) { +$if(long||double)$ + values = new LongLongHash(1, bigArrays); +$elseif(BytesRef)$ + values = new LongLongHash(1, bigArrays); + bytes = new BytesRefHash(1, bigArrays); +$elseif(int)$ + values = new LongHash(1, bigArrays); +$endif$ + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. 
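A standalone sketch of that 0/1/many buffering, with a plain StringBuilder standing in for the real block builder (hypothetical names, illustration only): the first value is held back so single-valued groups never open a multi-value entry.

import java.util.List;

final class ZeroOneManySketch {
    static String render(List<Integer> valuesForGroup) {
        StringBuilder builder = new StringBuilder();
        int count = 0;
        int first = 0;
        for (int value : valuesForGroup) {
            switch (count) {
                case 0 -> first = value; // buffer: this may turn out to be the only value
                case 1 -> builder.append('[').append(first).append(", ").append(value); // now we know it is multi-valued
                default -> builder.append(", ").append(value);
            }
            count++;
        }
        switch (count) {
            case 0 -> builder.append("null");
            case 1 -> builder.append(first);
            default -> builder.append(']');
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        System.out.println(render(List.of()));        // null
        System.out.println(render(List.of(7)));       // 7
        System.out.println(render(List.of(7, 8, 9))); // [7, 8, 9]
    }
}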
+ */ + int count = 0; + $if(BytesRef)$long$else$$type$$endif$ first = 0; + for (int id = 0; id < values.size(); id++) { +$if(long||BytesRef)$ + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); +$elseif(double)$ + if (values.getKey1(id) == selectedGroup) { + double value = Double.longBitsToDouble(values.getKey2(id)); +$elseif(int)$ + long both = values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == selectedGroup) { + int value = (int) both; +$endif$ + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.append$Type$($if(BytesRef)$bytes.get(first, scratch)$else$first$endif$); + builder.append$Type$($if(BytesRef)$bytes.get(value, scratch)$else$value$endif$); + } + default -> builder.append$Type$($if(BytesRef)$bytes.get(value, scratch)$else$value$endif$); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.append$Type$($if(BytesRef)$bytes.get(first, scratch)$else$first$endif$); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { +$if(BytesRef)$ + Releasables.closeExpectNoException(values, bytes); +$else$ + values.close(); +$endif$ + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index dd760408b3be5..93cd3a6b9326a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.core.Releasable; @@ -36,12 +35,10 @@ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash,// NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash { - protected final BigArrays bigArrays; protected final BlockFactory blockFactory; - BlockHash(DriverContext driverContext) { - bigArrays = driverContext.bigArrays(); - blockFactory = driverContext.blockFactory(); + BlockHash(BlockFactory blockFactory) { + this.blockFactory = blockFactory; } /** @@ -79,40 +76,40 @@ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // */ public static BlockHash build( List groups, - DriverContext driverContext, + BlockFactory blockFactory, int emitBatchSize, boolean allowBrokenOptimizations ) { if (groups.size() == 1) { - return newForElementType(groups.get(0).channel(), groups.get(0).elementType(), driverContext); + return newForElementType(groups.get(0).channel(), groups.get(0).elementType(), blockFactory); } if (allowBrokenOptimizations && groups.size() == 2) { var g1 = groups.get(0); var g2 = groups.get(1); if (g1.elementType() == ElementType.LONG && g2.elementType() == ElementType.LONG) { - return new LongLongBlockHash(driverContext, g1.channel(), g2.channel(), emitBatchSize); + return new 
LongLongBlockHash(blockFactory, g1.channel(), g2.channel(), emitBatchSize); } if (g1.elementType() == ElementType.BYTES_REF && g2.elementType() == ElementType.LONG) { - return new BytesRefLongBlockHash(driverContext, g1.channel(), g2.channel(), false, emitBatchSize); + return new BytesRefLongBlockHash(blockFactory, g1.channel(), g2.channel(), false, emitBatchSize); } if (g1.elementType() == ElementType.LONG && g2.elementType() == ElementType.BYTES_REF) { - return new BytesRefLongBlockHash(driverContext, g2.channel(), g1.channel(), true, emitBatchSize); + return new BytesRefLongBlockHash(blockFactory, g2.channel(), g1.channel(), true, emitBatchSize); } } - return new PackedValuesBlockHash(groups, driverContext, emitBatchSize); + return new PackedValuesBlockHash(groups, blockFactory, emitBatchSize); } /** * Creates a specialized hash table that maps a {@link Block} of the given input element type to ids. */ - private static BlockHash newForElementType(int channel, ElementType type, DriverContext driverContext) { + private static BlockHash newForElementType(int channel, ElementType type, BlockFactory blockFactory) { return switch (type) { - case NULL -> new NullBlockHash(channel, driverContext); - case BOOLEAN -> new BooleanBlockHash(channel, driverContext); - case INT -> new IntBlockHash(channel, driverContext); - case LONG -> new LongBlockHash(channel, driverContext); - case DOUBLE -> new DoubleBlockHash(channel, driverContext); - case BYTES_REF -> new BytesRefBlockHash(channel, driverContext); + case NULL -> new NullBlockHash(channel, blockFactory); + case BOOLEAN -> new BooleanBlockHash(channel, blockFactory); + case INT -> new IntBlockHash(channel, blockFactory); + case LONG -> new LongBlockHash(channel, blockFactory); + case DOUBLE -> new DoubleBlockHash(channel, blockFactory); + case BYTES_REF -> new BytesRefBlockHash(channel, blockFactory); default -> throw new IllegalArgumentException("unsupported grouping element type [" + type + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 79da105a9adaa..09ec04a1e4575 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupeBoolean; import static org.elasticsearch.compute.operator.MultivalueDedupeBoolean.FALSE_ORD; @@ -30,8 +30,8 @@ final class BooleanBlockHash extends BlockHash { private final int channel; private final boolean[] everSeen = new boolean[TRUE_ORD + 1]; - BooleanBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + BooleanBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index fb9b680c62d1d..f368852ef78fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -18,12 +18,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeBytesRef; @@ -46,10 +46,10 @@ final class BytesRefBlockHash extends BlockHash { */ private boolean seenNull; - BytesRefBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + BytesRefBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.bytesRefHash = new BytesRefHash(1, bigArrays); + this.bytesRefHash = new BytesRefHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index 7ee8a7165aa17..d11b3f0070e14 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -16,13 +16,13 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; /** @@ -36,8 +36,8 @@ final class BytesRefLongBlockHash extends BlockHash { private final BytesRefHash bytesHash; private final LongLongHash finalHash; - BytesRefLongBlockHash(DriverContext driverContext, int channel1, int channel2, boolean reverseOutput, int emitBatchSize) { - super(driverContext); + BytesRefLongBlockHash(BlockFactory blockFactory, int channel1, int channel2, boolean reverseOutput, int emitBatchSize) { + super(blockFactory); this.channel1 = channel1; this.channel2 = channel2; this.reverseOutput = reverseOutput; @@ -47,8 +47,8 @@ final class BytesRefLongBlockHash extends BlockHash { BytesRefHash bytesHash = null; LongLongHash longHash = null; try { - bytesHash = new BytesRefHash(1, bigArrays); - longHash = new 
LongLongHash(1, bigArrays); + bytesHash = new BytesRefHash(1, blockFactory.bigArrays()); + longHash = new LongLongHash(1, blockFactory.bigArrays()); this.bytesHash = bytesHash; this.finalHash = longHash; success = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index c03ce2a0a4dce..fe15a21a4beb0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -13,12 +13,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeDouble; @@ -40,10 +40,10 @@ final class DoubleBlockHash extends BlockHash { */ private boolean seenNull; - DoubleBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + DoubleBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index bd5438da153e4..47911c61fd704 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -13,10 +13,10 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeInt; @@ -37,10 +37,10 @@ final class IntBlockHash extends BlockHash { */ private boolean seenNull; - IntBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + IntBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index d817edb9e059a..639d9cf48a515 
100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -13,12 +13,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeLong; @@ -40,10 +40,10 @@ final class LongBlockHash extends BlockHash { */ private boolean seenNull; - LongBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + LongBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index 49b16198a5d77..056c3985b8728 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -33,12 +32,12 @@ final class LongLongBlockHash extends BlockHash { private final int emitBatchSize; private final LongLongHash hash; - LongLongBlockHash(DriverContext driverContext, int channel1, int channel2, int emitBatchSize) { - super(driverContext); + LongLongBlockHash(BlockFactory blockFactory, int channel1, int channel2, int emitBatchSize) { + super(blockFactory); this.channel1 = channel1; this.channel2 = channel2; this.emitBatchSize = emitBatchSize; - this.hash = new LongLongHash(1, bigArrays); + this.hash = new LongLongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java index 0c658ade236fd..601d75d832004 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java @@ -11,10 +11,10 @@ import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntVector; 
import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; /** * Maps a {@link BooleanBlock} column to group ids. Assigns group @@ -24,8 +24,8 @@ final class NullBlockHash extends BlockHash { private final int channel; private boolean seenNull = false; - NullBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + NullBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index b58c50b79311a..1e6a6b790bba8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -16,11 +16,11 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.BatchEncoder; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.core.Releasables; @@ -59,11 +59,11 @@ final class PackedValuesBlockHash extends BlockHash { private final BytesRefBuilder bytes = new BytesRefBuilder(); private final Group[] groups; - PackedValuesBlockHash(List specs, DriverContext driverContext, int emitBatchSize) { - super(driverContext); + PackedValuesBlockHash(List specs, BlockFactory blockFactory, int emitBatchSize) { + super(blockFactory); this.groups = specs.stream().map(Group::new).toArray(Group[]::new); this.emitBatchSize = emitBatchSize; - this.bytesRefHash = new BytesRefHash(1, bigArrays); + this.bytesRefHash = new BytesRefHash(1, blockFactory.bigArrays()); this.nullTrackingBytes = (groups.length + 7) / 8; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index b1211c8ea5ff4..855066fcb9da5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -143,14 +143,11 @@ public Page getOutput() { } iterator.consume(); shard = blockFactory.newConstantIntBlockWith(iterator.slice.shardContext().index(), currentPagePos); - boolean singleSegmentNonDecreasing; if (iterator.slice.numLeaves() == 1) { - singleSegmentNonDecreasing = true; int segmentOrd = iterator.slice.getLeaf(0).leafReaderContext().ord; leaf = blockFactory.newConstantIntBlockWith(segmentOrd, currentPagePos).asVector(); } else { // Due to the multi segment nature of time series source operator singleSegmentNonDecreasing must be false - singleSegmentNonDecreasing = false; leaf = segmentsBuilder.build(); segmentsBuilder = 
blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); } @@ -161,10 +158,9 @@ public Page getOutput() { timestampIntervalBuilder = blockFactory.newLongVectorBuilder(Math.min(remainingDocs, maxPageSize)); tsids = tsOrdBuilder.build(); tsOrdBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); - page = new Page( currentPagePos, - new DocVector(shard.asVector(), leaf, docs, singleSegmentNonDecreasing).asBlock(), + new DocVector(shard.asVector(), leaf, docs, leaf.isConstant()).asBlock(), tsids.asBlock(), timestampIntervals.asBlock() ); @@ -228,15 +224,13 @@ protected boolean lessThan(Leaf a, Leaf b) { void consume() throws IOException { if (queue != null) { currentTsid = BytesRef.deepCopyOf(queue.top().timeSeriesHash); - boolean breakOnNextTsidChange = false; + if (queue.size() > 0) { + queue.top().reinitializeIfNeeded(Thread.currentThread()); + } while (queue.size() > 0) { - if (remainingDocs <= 0) { + if (remainingDocs <= 0 || currentPagePos >= maxPageSize) { break; } - if (currentPagePos > maxPageSize) { - breakOnNextTsidChange = true; - } - currentPagePos++; remainingDocs--; Leaf leaf = queue.top(); @@ -244,46 +238,34 @@ void consume() throws IOException { docsBuilder.appendInt(leaf.iterator.docID()); timestampIntervalBuilder.appendLong(leaf.timestamp); tsOrdBuilder.appendInt(globalTsidOrd); + final Leaf newTop; if (leaf.nextDoc()) { // TODO: updating the top is one of the most expensive parts of this operation. // Ideally we would do this a less as possible. Maybe the top can be updated every N docs? - Leaf newTop = queue.updateTop(); - if (newTop.timeSeriesHash.equals(currentTsid) == false) { - globalTsidOrd++; - currentTsid = BytesRef.deepCopyOf(newTop.timeSeriesHash); - if (breakOnNextTsidChange) { - break; - } - } + newTop = queue.updateTop(); } else { queue.pop(); + newTop = queue.size() > 0 ? queue.top() : null; + } + if (newTop != null && newTop.timeSeriesHash.equals(currentTsid) == false) { + newTop.reinitializeIfNeeded(Thread.currentThread()); + globalTsidOrd++; + currentTsid = BytesRef.deepCopyOf(newTop.timeSeriesHash); } } } else { - int previousTsidOrd = leaf.timeSeriesHashOrd; - boolean breakOnNextTsidChange = false; // Only one segment, so no need to use priority queue and use segment ordinals as tsid ord. + leaf.reinitializeIfNeeded(Thread.currentThread()); while (leaf.nextDoc()) { - if (remainingDocs <= 0) { - break; - } - if (currentPagePos > maxPageSize) { - breakOnNextTsidChange = true; - } - if (breakOnNextTsidChange) { - if (previousTsidOrd != leaf.timeSeriesHashOrd) { - break; - } - } - - currentPagePos++; - remainingDocs--; - tsOrdBuilder.appendInt(leaf.timeSeriesHashOrd); timestampIntervalBuilder.appendLong(leaf.timestamp); // Don't append segment ord, because there is only one segment. 
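// Two notes on the consume() rewrite above:
// 1. A page is now cut as soon as currentPagePos reaches maxPageSize (or the doc limit runs out),
//    rather than overflowing maxPageSize to finish the current time series as the removed
//    breakOnNextTsidChange logic did, so a single _tsid may span consecutive pages.
// 2. reinitializeIfNeeded(Thread.currentThread()) rebuilds the per-leaf doc values and iterator
//    (re-advancing to the saved docID) whenever the driver has resumed on a different thread than
//    the one that created them, since those Lucene structures are not safe to carry across threads.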
docsBuilder.appendInt(leaf.iterator.docID()); - previousTsidOrd = leaf.timeSeriesHashOrd; + currentPagePos++; + remainingDocs--; + if (remainingDocs <= 0 || currentPagePos >= maxPageSize) { + break; + } } } } @@ -299,37 +281,55 @@ boolean completed() { static class Leaf { private final int segmentOrd; - private final SortedDocValues tsids; - private final SortedNumericDocValues timestamps; - private final DocIdSetIterator iterator; + private final Weight weight; + private final LeafReaderContext leaf; + private SortedDocValues tsids; + private SortedNumericDocValues timestamps; + private DocIdSetIterator iterator; + private Thread createdThread; private long timestamp; private int timeSeriesHashOrd; private BytesRef timeSeriesHash; + private int docID = -1; Leaf(Weight weight, LeafReaderContext leaf) throws IOException { this.segmentOrd = leaf.ord; + this.weight = weight; + this.leaf = leaf; + this.createdThread = Thread.currentThread(); tsids = leaf.reader().getSortedDocValues("_tsid"); timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); iterator = weight.scorer(leaf).iterator(); } boolean nextDoc() throws IOException { - int docID = iterator.nextDoc(); + docID = iterator.nextDoc(); if (docID == DocIdSetIterator.NO_MORE_DOCS) { return false; } - boolean advanced = tsids.advanceExact(iterator.docID()); + boolean advanced = tsids.advanceExact(docID); assert advanced; timeSeriesHashOrd = tsids.ordValue(); timeSeriesHash = tsids.lookupOrd(timeSeriesHashOrd); - advanced = timestamps.advanceExact(iterator.docID()); + advanced = timestamps.advanceExact(docID); assert advanced; timestamp = timestamps.nextValue(); return true; } + void reinitializeIfNeeded(Thread executingThread) throws IOException { + if (executingThread != createdThread) { + tsids = leaf.reader().getSortedDocValues("_tsid"); + timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); + iterator = weight.scorer(leaf).iterator(); + if (docID != -1) { + iterator.advance(docID); + } + createdThread = executingThread; + } + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 6dcdd15fd1d1c..04b9d576fe0aa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -47,7 +47,7 @@ public record HashAggregationOperatorFactory(List groups, List BlockHash.build(groups, driverContext, maxPageSize, false), + () -> BlockHash.build(groups, driverContext.blockFactory(), maxPageSize, false), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index c3a26cedf5bbe..226a0ac534942 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -478,7 +478,12 @@ private static class ValuesAggregator implements Releasable { ); this.aggregator = new HashAggregationOperator( aggregatorFactories, - () -> BlockHash.build(List.of(new GroupSpec(channelIndex, groupingElementType)), driverContext, maxPageSize, false), + () -> 
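// Hedged usage sketch of the new BlockHash.build signature; the GroupSpec channel, element type
// and batch size below are illustrative, not taken from this PR:
//   BlockHash hash = BlockHash.build(
//       List.of(new HashAggregationOperator.GroupSpec(0, ElementType.LONG)),
//       driverContext.blockFactory(),   // callers now hand over the BlockFactory directly
//       16 * 1024,                      // emitBatchSize
//       false                           // allowBrokenOptimizations
//   );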
BlockHash.build( + List.of(new GroupSpec(channelIndex, groupingElementType)), + driverContext.blockFactory(), + maxPageSize, + false + ), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index bce4d2b0a454a..34fb0f96b8722 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -240,7 +240,7 @@ public String toString() { List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), () -> BlockHash.build( List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), - driverContext, + driverContext.blockFactory(), randomPageSize(), false ), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java new file mode 100644 index 0000000000000..b5525b985be90 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceDoubleBlockSourceOperator(blockFactory, IntStream.range(0, size).mapToDouble(i -> randomDouble())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of doubles"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToDouble(b -> allDoubles(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..4554a60b7a00c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of doubles"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new LongDoubleTupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream() + .flatMapToDouble(p -> allDoubles(p, group)) + .boxed() + .collect(Collectors.toSet()) + .toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java new file mode 100644 index 0000000000000..9d421c7801a43 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesIntAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceIntBlockSourceOperator(blockFactory, IntStream.range(0, size).map(i -> randomInt())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of ints"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToInt(b -> allInts(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..831e2c1fdfd68 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of ints"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new LongIntBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream().flatMapToInt(p -> allInts(p, group)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java new file mode 100644 index 0000000000000..e2a77bed4f4cd --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesLongAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of longs"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToLong(b -> allLongs(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..ab667b959c7ae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.TupleBlockSourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of longs"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream().flatMapToLong(p -> allLongs(p, group)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 2dc527ce213d6..a874836198be0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.MockBlockFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupeTests; import org.elasticsearch.core.Releasables; @@ -202,10 +201,9 @@ private BlockHash newBlockHash(BlockFactory blockFactory, int emitBatchSize, Lis for (int c = 0; c < types.size(); c++) { specs.add(new HashAggregationOperator.GroupSpec(c, types.get(c))); } - DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); return forcePackedHash - ? new PackedValuesBlockHash(specs, driverContext, emitBatchSize) - : BlockHash.build(specs, driverContext, emitBatchSize, true); + ? 
new PackedValuesBlockHash(specs, blockFactory, emitBatchSize) + : BlockHash.build(specs, blockFactory, emitBatchSize, true); } private static class KeyComparator implements Comparator> { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 4e392ca24dada..2dc46e71360c9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -1156,11 +1155,10 @@ private void hash(Consumer callback, int emitBatchSize, Block... va for (int c = 0; c < values.length; c++) { specs.add(new HashAggregationOperator.GroupSpec(c, values[c].elementType())); } - DriverContext driverContext = new DriverContext(bigArrays, blockFactory); try ( BlockHash blockHash = forcePackedHash - ? new PackedValuesBlockHash(specs, driverContext, emitBatchSize) - : BlockHash.build(specs, driverContext, emitBatchSize, true) + ? new PackedValuesBlockHash(specs, blockFactory, emitBatchSize) + : BlockHash.build(specs, blockFactory, emitBatchSize, true) ) { hash(true, blockHash, callback, values); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 3b47597d6ea2f..b397d36837d01 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -25,38 +25,51 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import 
org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.junit.After; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.function.Function; -import static org.hamcrest.Matchers.either; +import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { @@ -73,81 +86,28 @@ public void testSimple() { int numTimeSeries = 3; int numSamplesPerTS = 10; long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(1024, 1024, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); - assertThat(results, hasSize(1)); - Page page = results.get(0); - assertThat(page.getBlockCount(), equalTo(5)); - - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - assertThat(voltageVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); - assertThat(hostnameVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - + int maxPageSize = between(1, 1024); + List results = runDriver(1024, maxPageSize, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); + // for now we emit at most one time series each page int offset = 0; - for (int expectedTsidOrd = 0; expectedTsidOrd < numTimeSeries; expectedTsidOrd++) { - String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); - long expectedVoltage = 5L + expectedTsidOrd; - for (int j = 0; j < numSamplesPerTS; j++) { - long expectedTimestamp = timestampStart + ((numSamplesPerTS - j - 1) * 10_000L); - - assertThat(docVector.shards().getInt(offset), equalTo(0)); - assertThat(voltageVector.getLong(offset), equalTo(expectedVoltage)); - assertThat(hostnameVector.getBytesRef(offset, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); - assertThat(tsidVector.getInt(offset), equalTo(expectedTsidOrd)); - assertThat(timestampVector.getLong(offset), equalTo(expectedTimestamp)); - offset++; - } 
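// Worked example of the offset arithmetic above, assuming numSamplesPerTS = 10 as in the test:
//   offset = 23  ->  expectedTsidOrd = 23 / 10 = 2, sampleIndex = 23 - 2 * 10 = 3,
//   expectedTimestamp = timestampStart + (10 - 3 - 1) * 10_000 = timestampStart + 60_000,
// i.e. each time series is checked newest-first, regardless of how many pages it spans.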
- } - } - - public void testMaxPageSize() { - int numTimeSeries = 3; - int numSamplesPerTS = 10; - long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(1024, 1, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); - // A time series shouldn't be split over multiple pages. - assertThat(results, hasSize(numTimeSeries)); - for (int i = 0; i < numTimeSeries; i++) { - Page page = results.get(i); + for (Page page : results) { assertThat(page.getBlockCount(), equalTo(5)); - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numSamplesPerTS)); - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numSamplesPerTS)); - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numSamplesPerTS)); - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - assertThat(voltageVector.getPositionCount(), equalTo(numSamplesPerTS)); - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); - assertThat(hostnameVector.getPositionCount(), equalTo(numSamplesPerTS)); - - int offset = 0; - int expectedTsidOrd = i; - String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); - long expectedVoltage = 5L + expectedTsidOrd; - for (int j = 0; j < numSamplesPerTS; j++) { - long expectedTimestamp = timestampStart + ((numSamplesPerTS - j - 1) * 10_000L); - - assertThat(docVector.shards().getInt(offset), equalTo(0)); - assertThat(voltageVector.getLong(offset), equalTo(expectedVoltage)); - assertThat(hostnameVector.getBytesRef(offset, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); - assertThat(tsidVector.getInt(offset), equalTo(expectedTsidOrd)); - assertThat(timestampVector.getLong(offset), equalTo(expectedTimestamp)); + for (int i = 0; i < page.getPositionCount(); i++) { + int expectedTsidOrd = offset / numSamplesPerTS; + String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); + long expectedVoltage = 5L + expectedTsidOrd; + int sampleIndex = offset - expectedTsidOrd * numSamplesPerTS; + long expectedTimestamp = timestampStart + ((numSamplesPerTS - sampleIndex - 1) * 10_000L); + assertThat(docVector.shards().getInt(i), equalTo(0)); + assertThat(voltageVector.getLong(i), equalTo(expectedVoltage)); + assertThat(hostnameVector.getBytesRef(i, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); + assertThat(tsidVector.getInt(i), equalTo(expectedTsidOrd)); + assertThat(timestampVector.getLong(i), equalTo(expectedTimestamp)); offset++; } } @@ -158,7 +118,7 @@ public void testLimit() { int numSamplesPerTS = 10; int limit = 1; long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(limit, 1024, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); + List results = runDriver(limit, randomIntBetween(1, 1024), randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); assertThat(results, hasSize(1)); Page page = results.get(0); assertThat(page.getBlockCount(), equalTo(5)); @@ -186,59 +146,229 @@ public void testLimit() { } public void testRandom() { - int numDocs = 1024; - var ctx = driverContext(); - long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - var timeSeriesFactory = 
createTimeSeriesSourceOperator(Integer.MAX_VALUE, Integer.MAX_VALUE, randomBoolean(), writer -> { - int commitEvery = 64; - long timestamp = timestampStart; - for (int i = 0; i < numDocs; i++) { - String hostname = String.format(Locale.ROOT, "host-%02d", i % 20); - int voltage = i % 5; - writeTS(writer, timestamp, new Object[] { "hostname", hostname }, new Object[] { "voltage", voltage }); - if (i % commitEvery == 0) { - writer.commit(); - } - timestamp += 10_000; + record Doc(int host, long timestamp, long metric) {} + int numDocs = between(1, 5000); + List docs = new ArrayList<>(); + Map timestamps = new HashMap<>(); + long t0 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numDocs; i++) { + int tsid = randomIntBetween(0, 9); + long timestamp = timestamps.compute(tsid, (k, curr) -> { + long t = curr != null ? curr : t0; + return t + randomIntBetween(1, 5000); + }); + docs.add(new Doc(tsid, timestamp, randomIntBetween(1, 10000))); + } + int maxPageSize = between(1, 1024); + int limit = randomBoolean() ? between(1, 100000) : Integer.MAX_VALUE; + var timeSeriesFactory = createTimeSeriesSourceOperator(limit, maxPageSize, randomBoolean(), writer -> { + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, new Object[] { "hostname", "h" + doc.host }, new Object[] { "metric", doc.metric }); } - return numDocs; + return docs.size(); }); + DriverContext driverContext = driverContext(); List results = new ArrayList<>(); - - var voltageField = new NumberFieldMapper.NumberFieldType("voltage", NumberFieldMapper.NumberType.LONG); + var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( - ctx, - timeSeriesFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, voltageField, ElementType.LONG).get(ctx)), + driverContext, + timeSeriesFactory.get(driverContext), + List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - OperatorTestCase.assertDriverContext(ctx); - assertThat(results, hasSize(1)); - Page page = results.get(0); - assertThat(page.getBlockCount(), equalTo(4)); + docs.sort(Comparator.comparing(Doc::host).thenComparing(Comparator.comparingLong(Doc::timestamp).reversed())); + Map hostToTsidOrd = new HashMap<>(); + timestamps.keySet().stream().sorted().forEach(n -> hostToTsidOrd.put(n, hostToTsidOrd.size())); + int offset = 0; + for (int p = 0; p < results.size(); p++) { + Page page = results.get(p); + if (p < results.size() - 1) { + assertThat(page.getPositionCount(), equalTo(maxPageSize)); + } else { + assertThat(page.getPositionCount(), lessThanOrEqualTo(limit)); + assertThat(page.getPositionCount(), lessThanOrEqualTo(maxPageSize)); + } + assertThat(page.getBlockCount(), equalTo(4)); + DocVector docVector = (DocVector) page.getBlock(0).asVector(); + IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); + LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); + LongVector metricVector = (LongVector) page.getBlock(3).asVector(); + for (int i = 0; i < page.getPositionCount(); i++) { + Doc doc = docs.get(offset); + offset++; + assertThat(docVector.shards().getInt(0), equalTo(0)); + assertThat(tsidVector.getInt(i), equalTo(hostToTsidOrd.get(doc.host))); + assertThat(timestampVector.getLong(i), equalTo(doc.timestamp)); + assertThat(metricVector.getLong(i), 
equalTo(doc.metric)); + } + } + assertThat(offset, equalTo(Math.min(limit, numDocs))); + } - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numDocs)); + public void testBasicRate() { + long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; + long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numDocs)); + long[] v2 = { 7, 2, 0, 11, 24, 0, 4, 1, 10, 2 }; + long[] t2 = { 1, 2, 4, 5, 6, 8, 10, 11, 12, 14 }; - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numDocs)); + long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; + long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; + List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); + long unit = between(1, 5); + Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit)); + assertThat(actualRates, equalTo(Map.of("p1", 35.0 * unit / 111.0, "p2", 42.0 * unit / 13.0, "p3", 10.0 * unit / 20.0))); + } - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - assertThat(voltageVector.getPositionCount(), equalTo(numDocs)); - for (int i = 0; i < page.getBlockCount(); i++) { - assertThat(docVector.shards().getInt(0), equalTo(0)); - assertThat(voltageVector.getLong(i), either(greaterThanOrEqualTo(0L)).or(lessThanOrEqualTo(4L))); - assertThat(tsidVector.getInt(i), either(greaterThanOrEqualTo(0)).or(lessThan(20))); - assertThat(timestampVector.getLong(i), greaterThanOrEqualTo(timestampStart)); + public void testRandomRate() { + int numPods = between(1, 10); + List pods = new ArrayList<>(); + Map expectedRates = new HashMap<>(); + TimeValue unit = TimeValue.timeValueSeconds(1); + for (int p = 0; p < numPods; p++) { + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + t += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = t; + } + Pod pod = new Pod("p" + p, times, values); + pods.add(pod); + if (numValues == 1) { + expectedRates.put(pod.name, null); + } else { + expectedRates.put(pod.name, pod.expectedRate(unit)); + } + } + Map actualRates = runRateTest(pods, unit); + assertThat(actualRates, equalTo(expectedRates)); + } + + record Pod(String name, long[] times, long[] values) { + Pod { + assert times.length == values.length : times.length + "!=" + values.length; + } + + double expectedRate(TimeValue unit) { + double dv = 0; + for (int i = 0; i < values.length - 1; i++) { + if (values[i + 1] < values[i]) { + dv += values[i]; + } + } + dv += (values[values.length - 1] - values[0]); + long dt = times[times.length - 1] - times[0]; + return (dv * unit.millis()) / dt; } } + Map runRateTest(List pods, TimeValue unit) { + long unitInMillis = unit.millis(); + record Doc(String pod, long timestamp, long requests) { + + } + var sourceOperatorFactory = createTimeSeriesSourceOperator(Integer.MAX_VALUE, between(1, 100), randomBoolean(), writer -> { + List docs = new ArrayList<>(); + for (Pod pod : pods) { + for (int i = 0; i < pod.times.length; i++) { + docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + } + } + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, 
new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + } + return docs.size(); + }); + var ctx = driverContext(); + HashAggregationOperator finalHash = new HashAggregationOperator( + List.of(new RateLongAggregatorFunctionSupplier(List.of(1, 2, 3), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL)), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + List results = new ArrayList<>(); + var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + var podField = new KeywordFieldMapper.KeywordFieldType("pod"); + if (randomBoolean()) { + HashAggregationOperator initialHash = new HashAggregationOperator( + List.of( + new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(3, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + initialHash, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } else { + var blockLoader = new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader("pod"); + var shardContext = new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE); + var ordinalGrouping = new OrdinalsGroupingOperator( + shardIdx -> blockLoader, + List.of(shardContext), + ElementType.BYTES_REF, + 0, + "pod", + List.of( + new RateLongAggregatorFunctionSupplier(List.of(3, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + randomIntBetween(1, 1000), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + ordinalGrouping, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } + Map rates = new HashMap<>(); + for (Page result : results) { + BytesRefBlock keysBlock = result.getBlock(0); + DoubleBlock ratesBlock = result.getBlock(1); + for (int i = 0; i < result.getPositionCount(); i++) { + rates.put(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), ratesBlock.getDouble(i)); + } + result.releaseBlocks(); + } + return rates; + } + @Override protected Operator.OperatorFactory simple() { return createTimeSeriesSourceOperator(1, 1, false, writer -> { @@ -289,6 +419,10 @@ List runDriver(int limit, int maxPageSize, boolean forceMerge, int numTime ) ); OperatorTestCase.assertDriverContext(ctx); + for (Page result : results) { + assertThat(result.getPositionCount(), lessThanOrEqualTo(maxPageSize)); + assertThat(result.getPositionCount(), lessThanOrEqualTo(limit)); + } return results; } @@ -298,7 +432,6 @@ TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( boolean forceMerge, CheckedFunction indexingLogic ) { - int numDocs; Sort sort = new Sort( new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false), new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true) @@ -311,23 +444,18 @@ 
TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( ) ) { - numDocs = indexingLogic.apply(writer); + int numDocs = indexingLogic.apply(writer); if (forceMerge) { writer.forceMerge(1); } reader = writer.getReader(); + assertThat(reader.numDocs(), equalTo(numDocs)); } catch (IOException e) { throw new UncheckedIOException(e); } var ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); Function queryFunction = c -> new MatchAllDocsQuery(); - return TimeSeriesSortedSourceOperatorFactory.create( - Math.min(numDocs, limit), - Math.min(numDocs, maxPageSize), - 1, - List.of(ctx), - queryFunction - ); + return TimeSeriesSortedSourceOperatorFactory.create(limit, maxPageSize, 1, List.of(ctx), queryFunction); } static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { diff --git a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java index 7431c5b8204d7..8728b605134ac 100644 --- a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java +++ b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java @@ -129,8 +129,11 @@ public void testAccessAfterClose() { protected EsqlQueryResponse run(EsqlQueryRequestBuilder request) { try { + // The variants here exercise different API usage patterns if (randomBoolean()) { return request.execute().actionGet(30, SECONDS); + } else if (randomBoolean()) { + return client().execute(request.action(), request.request()).actionGet(30, SECONDS); } else { return ClientHelper.executeWithHeaders( Map.of("Foo", "bar"), diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index bb8163915c1c4..2dd64cf02446b 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql; import org.apache.http.HttpStatus; +import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -31,6 +32,9 @@ import java.util.Locale; import java.util.Map; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class EsqlSecurityIT extends ESRestTestCase { @@ -47,6 +51,7 @@ public class EsqlSecurityIT extends ESRestTestCase { .user("user3", "x-pack-test-password", "user3", false) .user("user4", "x-pack-test-password", "user4", false) .user("user5", "x-pack-test-password", "user5", false) + .user("fls_user", "x-pack-test-password", "fls_user", false) .build(); @Override @@ -62,7 +67,11 @@ protected Settings restClientSettings() { private void indexDocument(String index, int id, double value, String org) throws IOException { Request indexDoc = new Request("PUT", index + "/_doc/" + id); - indexDoc.setJsonEntity("{\"value\":" + value + ",\"org\":\"" + org + "\"}"); + XContentBuilder builder =
JsonXContent.contentBuilder().startObject(); + builder.field("value", value); + builder.field("org", org); + builder.field("partial", org + value); + indexDoc.setJsonEntity(Strings.toString(builder.endObject())); client().performRequest(indexDoc); } @@ -85,6 +94,11 @@ public void indexDocuments() throws IOException { indexDocument("index-user2", 1, 32.0, "marketing"); indexDocument("index-user2", 2, 40.0, "sales"); refresh("index-user2"); + + createIndex("indexpartial", Settings.EMPTY, mapping); + indexDocument("indexpartial", 1, 32.0, "marketing"); + indexDocument("indexpartial", 2, 40.0, "sales"); + refresh("indexpartial"); } public void testAllowedIndices() throws Exception { @@ -122,7 +136,7 @@ public void testUnauthorizedIndices() throws IOException { assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); } - public void testDLS() throws Exception { + public void testDocumentLevelSecurity() throws Exception { Response resp = runESQLCommand("user3", "from index | stats sum=sum(value)"); assertOK(resp); Map respMap = entityAsMap(resp); @@ -130,6 +144,69 @@ public void testDLS() throws Exception { assertThat(respMap.get("values"), equalTo(List.of(List.of(10.0)))); } + public void testFieldLevelSecurityAllow() throws Exception { + Response resp = runESQLCommand("fls_user", "FROM index* | SORT value | LIMIT 1"); + assertOK(resp); + assertMap( + entityAsMap(resp), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "partial").entry("type", "text"), + matchesMap().entry("name", "value").entry("type", "double") + ) + ) + .entry("values", List.of(List.of("sales10.0", 10.0))) + ); + } + + public void testFieldLevelSecurityAllowPartial() throws Exception { + Request request = new Request("GET", "/index*/_field_caps"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "fls_user")); + request.addParameter("error_trace", "true"); + request.addParameter("pretty", "true"); + request.addParameter("fields", "*"); + + request = new Request("GET", "/index*/_search"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "fls_user")); + request.addParameter("error_trace", "true"); + request.addParameter("pretty", "true"); + + Response resp = runESQLCommand("fls_user", "FROM index* | SORT partial | LIMIT 1"); + assertOK(resp); + assertMap( + entityAsMap(resp), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "partial").entry("type", "text"), + matchesMap().entry("name", "value").entry("type", "double") + ) + ) + .entry("values", List.of(List.of("engineering20.0", 20.0))) + ); + } + + public void testFieldLevelSecuritySpellingMistake() throws Exception { + ResponseException e = expectThrows( + ResponseException.class, + () -> runESQLCommand("fls_user", "FROM index* | SORT parial | LIMIT 1") + ); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString("Unknown column [parial]")); + } + + public void testFieldLevelSecurityNotAllowed() throws Exception { + ResponseException e = expectThrows( + ResponseException.class, + () -> runESQLCommand("fls_user", "FROM index* | SORT org DESC | LIMIT 1") + ); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString("Unknown column [org]")); + } + public void testRowCommand() throws Exception { String 
user = randomFrom("test-admin", "user1", "user2"); Response resp = runESQLCommand(user, "row a = 5, b = 2 | stats count=sum(b) by a"); @@ -283,6 +360,7 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio Request request = new Request("POST", "_query"); request.setJsonEntity(Strings.toString(json)); request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + request.addParameter("error_trace", "true"); return client().performRequest(request); } diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml index 7a89fa57f7102..7d134103afd28 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml @@ -51,9 +51,22 @@ user4: - names: ['index-user1', 'index', "test-enrich" ] privileges: - read + user5: cluster: [] indices: - names: ['index-user1', 'index', "test-enrich" ] privileges: - read + +fls_user: + cluster: [] + indices: + - names: [ 'index' ] + privileges: [ 'read' ] + field_security: + grant: [ value, partial ] + - names: [ 'indexpartial' ] + privileges: [ 'read' ] + field_security: + grant: [ value ] diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 9d22045522d19..0446a11240f6f 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -8,11 +8,18 @@ package org.elasticsearch.xpack.esql.qa.mixed; import org.elasticsearch.Version; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; +import org.junit.AfterClass; +import org.junit.Before; import org.junit.ClassRule; +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; @@ -27,12 +34,38 @@ protected String getTestRestCluster() { static final Version bwcVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + private static final Set oldClusterFeatures = new HashSet<>(); + private static boolean oldClusterFeaturesInitialized = false; + + @Before + public void extractOldClusterFeatures() { + if (oldClusterFeaturesInitialized == false) { + oldClusterFeatures.addAll(testFeatureService.getAllSupportedFeatures()); + oldClusterFeaturesInitialized = true; + } + } + + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterFeaturesInitialized; + return oldClusterFeatures.contains(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + + @AfterClass + public static void cleanUp() { + oldClusterFeaturesInitialized = false; + oldClusterFeatures.clear(); + } + public MixedClusterEsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, 
CsvTestCase testCase, Mode mode) { super(fileName, groupName, testName, lineNumber, testCase, mode); } @Override - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, bwcVersion)); if (mode == ASYNC) { @@ -42,6 +75,6 @@ protected void shouldSkipTest(String testName) { @Override protected boolean supportsAsync() { - return bwcVersion.onOrAfter(Version.V_8_13_0); + return oldClusterHasFeature(ASYNC_QUERY_FEATURE_ID); } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index e5e53f34df312..0ea445255f0d8 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -19,10 +19,13 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.elasticsearch.xpack.ql.CsvSpecReader; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.ql.SpecReader; +import org.junit.AfterClass; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -34,6 +37,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -62,6 +66,9 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { @ClassRule public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + private static TestFeatureService remoteFeaturesService; + private static RestClient remoteFeaturesServiceClient; + @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { List urls = classpathResources("/*.csv-spec"); @@ -86,12 +93,34 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In } @Override - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); + for (String feature : testCase.requiredFeatures) { + assumeTrue("Test " + testName + " requires " + feature, remoteFeaturesService().clusterHasFeature(feature)); + } assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } + private TestFeatureService remoteFeaturesService() throws IOException { + if (remoteFeaturesService == null) { + HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + remoteFeaturesServiceClient = super.buildClient(restAdminSettings(), remoteHosts); + var remoteNodeVersions = readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var semanticNodeVersions = remoteNodeVersions.stream() + 
.map(ESRestTestCase::parseLegacyVersion) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteFeaturesServiceClient), semanticNodeVersions); + } + return remoteFeaturesService; + } + + @AfterClass + public static void closeRemoteFeaturesService() throws IOException { + IOUtils.close(remoteFeaturesServiceClient); + } + @Override protected String getTestRestCluster() { return localCluster.getHttpAddresses(); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 07f480ce9950e..349954450904d 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -54,6 +54,9 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { + // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here + protected static final String ASYNC_QUERY_FEATURE_ID = "esql.async_query"; + private static final Logger LOGGER = LogManager.getLogger(EsqlSpecTestCase.class); private final String fileName; private final String groupName; @@ -103,7 +106,7 @@ public void setup() throws IOException { } protected boolean supportsAsync() { - return Version.CURRENT.onOrAfter(Version.V_8_13_0); // the Async API was introduced in 8.13.0 + return clusterHasFeature(ASYNC_QUERY_FEATURE_ID); // the Async API was introduced in 8.13.0 } @AfterClass @@ -131,7 +134,7 @@ public final void test() throws Throwable { } } - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { for (String feature : testCase.requiredFeatures) { assumeTrue("Test " + testName + " requires " + feature, clusterHasFeature(feature)); } @@ -238,7 +241,11 @@ public static void assertRequestBreakerEmpty() throws Exception { Map node = (Map) n; Map breakers = (Map) node.get("breakers"); Map request = (Map) breakers.get("request"); - assertMap(request, matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b")); + assertMap( + "circuit breakers not reset to 0", + request, + matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b") + ); } }); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index cccd1a3f8854b..2d0a39da5a8b4 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -422,7 +422,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { for (int i = 1; i <= expectedWarnings; i++) { assertThat( warnings.get(i), - containsString("java.lang.NumberFormatException: For input string: \\\"keyword" + (2 * i - 1) + "\\\"") + containsString("org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [keyword" + (2 * i - 1) + "]") ); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index 25530e3d744ad..fc5242714b2cc 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -30,7 +30,7 @@ public record QueryExecuted(String query, int depth, List outputSchema, public static String sourceCommand(List availabeIndices) { return switch (randomIntBetween(0, 2)) { case 0 -> from(availabeIndices); - case 1 -> showFunctions(); + case 1 -> metaFunctions(); default -> row(); }; @@ -361,8 +361,8 @@ private static String from(List availabeIndices) { return result.toString(); } - private static String showFunctions() { - return "show functions"; + private static String metaFunctions() { + return "meta functions"; } private static String indexPattern(String indexName) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index f8f406b269a22..0b7e39608b9aa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -11,9 +11,12 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.logging.Logger; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.esql.CsvTestUtils.ActualResults; import org.elasticsearch.xpack.versionfield.Version; +import org.hamcrest.Description; import org.hamcrest.Matchers; +import org.hamcrest.StringDescription; import java.util.ArrayList; import java.util.Comparator; @@ -23,6 +26,7 @@ import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; @@ -184,7 +188,7 @@ public static void assertData( actualValues.sort(resultRowComparator(expected.columnTypes())); } var expectedValues = expected.values(); - ArrayList dataFailures = new ArrayList<>(); + List dataFailures = new ArrayList<>(); for (int row = 0; row < expectedValues.size(); row++) { try { @@ -230,7 +234,7 @@ public static void assertData( dataFailures.add(new DataFailure(row, column, transformedExpected, transformedActual)); } if (dataFailures.size() > 10) { - fail("Data mismatch: " + dataFailures); + dataFailure(dataFailures); } } @@ -247,7 +251,7 @@ public static void assertData( } } if (dataFailures.isEmpty() == false) { - fail("Data mismatch: " + dataFailures); + dataFailure(dataFailures); } if (expectedValues.size() < actualValues.size()) { fail( @@ -256,6 +260,27 @@ public static void assertData( } } + private static void dataFailure(List dataFailures) { + fail("Data mismatch:\n" + dataFailures.stream().map(f -> { + Description description = new StringDescription(); + ListMatcher expected; + if (f.expected instanceof List e) { + expected = ListMatcher.matchesList(e); + } else { + expected = ListMatcher.matchesList().item(f.expected); + } + List actualList; + if (f.actual instanceof List a) { + actualList = a; + } else { + actualList = List.of(f.actual); + } +
expected.describeMismatch(actualList, description); + String prefix = "row " + f.row + " column " + f.column + ":"; + return prefix + description.toString().replace("\n", "\n" + prefix); + }).collect(Collectors.joining("\n"))); + } + private static Comparator> resultRowComparator(List types) { return (x, y) -> { for (int i = 0; i < x.size(); i++) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index c5730f3271945..ec181c552bf22 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -61,6 +61,7 @@ public class CsvTestsDataLoader { private static final TestsDataset HEIGHTS = new TestsDataset("heights", "mapping-heights.json", "heights.csv"); private static final TestsDataset DECADES = new TestsDataset("decades", "mapping-decades.json", "decades.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); + private static final TestsDataset AIRPORTS_MP = new TestsDataset("airports_mp", "mapping-airports.json", "airports_mp.csv"); private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); private static final TestsDataset COUNTRIES_BBOX = new TestsDataset( "countries_bbox", @@ -91,6 +92,7 @@ public class CsvTestsDataLoader { Map.entry(HEIGHTS.indexName, HEIGHTS), Map.entry(DECADES.indexName, DECADES), Map.entry(AIRPORTS.indexName, AIRPORTS), + Map.entry(AIRPORTS_MP.indexName, AIRPORTS_MP), Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB), @@ -105,6 +107,7 @@ public class CsvTestsDataLoader { private static final EnrichConfig DECADES_ENRICH = new EnrichConfig("decades_policy", "enrich-policy-decades.json"); private static final EnrichConfig CITY_NAMES_ENRICH = new EnrichConfig("city_names", "enrich-policy-city_names.json"); private static final EnrichConfig CITY_BOUNDARIES_ENRICH = new EnrichConfig("city_boundaries", "enrich-policy-city_boundaries.json"); + private static final EnrichConfig CITY_AIRPORTS_ENRICH = new EnrichConfig("city_airports", "enrich-policy-city_airports.json"); public static final List ENRICH_SOURCE_INDICES = List.of( "languages", @@ -123,7 +126,8 @@ public class CsvTestsDataLoader { HEIGHTS_ENRICH, DECADES_ENRICH, CITY_NAMES_ENRICH, - CITY_BOUNDARIES_ENRICH + CITY_BOUNDARIES_ENRICH, + CITY_AIRPORTS_ENRICH ); /** @@ -281,6 +285,7 @@ private static void loadCsvData( CheckedBiFunction p, Logger logger ) throws IOException { + ArrayList failures = new ArrayList<>(); StringBuilder builder = new StringBuilder(); try (BufferedReader reader = org.elasticsearch.xpack.ql.TestUtils.reader(resource)) { String line; @@ -390,13 +395,19 @@ private static void loadCsvData( } lineNumber++; if (builder.length() > BULK_DATA_SIZE) { - sendBulkRequest(indexName, builder, client, logger); + sendBulkRequest(indexName, builder, client, logger, failures); builder.setLength(0); } } } if (builder.isEmpty() == false) { - sendBulkRequest(indexName, builder, client, logger); + sendBulkRequest(indexName, builder, client, logger, failures); + } + if (failures.isEmpty() == false) { + for (String failure : failures) { + 
logger.error(failure); + } + throw new IOException("Data loading failed with " + failures.size() + " errors: " + failures.get(0)); } } @@ -405,7 +416,8 @@ private static String quoteIfNecessary(String value) { return isQuoted ? value : "\"" + value + "\""; } - private static void sendBulkRequest(String indexName, StringBuilder builder, RestClient client, Logger logger) throws IOException { + private static void sendBulkRequest(String indexName, StringBuilder builder, RestClient client, Logger logger, List failures) + throws IOException { // The indexName is optional for a bulk request, but we use it for routing in MultiClusterSpecIT. builder.append("\n"); logger.debug("Sending bulk request of [{}] bytes for [{}]", builder.length(), indexName); @@ -422,14 +434,26 @@ private static void sendBulkRequest(String indexName, StringBuilder builder, Res if (Boolean.FALSE.equals(errors)) { logger.info("Data loading of [{}] bytes into [{}] OK", builder.length(), indexName); } else { - throw new IOException("Data loading of [" + indexName + "] failed with errors: " + errors); + addError(failures, indexName, builder, "errors: " + result); } } } else { - throw new IOException("Data loading of [" + indexName + "] failed with status: " + response.getStatusLine()); + addError(failures, indexName, builder, "status: " + response.getStatusLine()); } } + private static void addError(List failures, String indexName, StringBuilder builder, String message) { + failures.add( + format( + "Data loading of [{}] bytes into [{}] failed with {}: Data [{}...]", + builder.length(), + indexName, + message, + builder.substring(0, 100) + ) + ); + } + private static void forceMerge(RestClient client, Set indices, Logger logger) throws IOException { String pattern = String.join(",", indices); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv new file mode 100644 index 0000000000000..079ef2e419fff --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv @@ -0,0 +1,8 @@ +abbrev:keyword,name:text, scalerank:integer,type:keyword, location:geo_point, country:keyword, city:keyword, city_location:geo_point +XXX, Atlantis Int'l, 1, mid, POINT(0 0), Atlantis, Atlantis, POINT(0 0) +LUH, Sahnewal, 9, small, POINT(75.9570722403652 30.8503598561702), India, Ludhiāna, POINT(75.85 30.91) +SSE, Solapur, 9, mid, POINT(75.9330597710755 17.625415183635), India, Solāpur, POINT(75.92 17.68) +IXR, Birsa Munda, 9, mid, POINT(85.3235970368767 23.3177245989962), India, Rānchi, POINT(85.33 23.36) +AWZ, Ahwaz, 9, mid, POINT(48.7471065435931 31.3431585560757), Iran, Ahvāz, POINT(48.6692 31.3203) +GWL, Gwalior, 9, [mid,military], POINT(78.2172186546348 26.285487697937), India, Gwalior, POINT(78.178 26.2215) +HOD, Hodeidah Int'l, 9, mid, POINT(42.97109630194 14.7552534413725), Yemen, Al Ḩudaydah, POINT(42.9511 14.8022) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 7641bd3305b1d..2713660cd47d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -235,14 +235,18 @@ emp_no:integer |languages:integer |byte2bool:boolean |short2bool:boolean 10030 |3 |true |true ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = [true, false, true, false] | eval sa = 
mv_sort(a), sb = mv_sort(a, "DESC"); a:boolean | sa:boolean | sb:boolean [true, false, true, false] | [false, false, true, true] | [true, true, false, false] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(is_rehired, "DESC"), sa = mv_sort(is_rehired) | sort emp_no @@ -258,7 +262,9 @@ emp_no:integer | is_rehired:boolean | sa:boolean | sd:boolea 10005 | [false,false,false,true] | [false,false,false,true] | [true,false,false,false] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); @@ -266,7 +272,9 @@ a:boolean | a1:boolean | a2:boolean [true, false, false, true] | false | [false, true] ; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(is_rehired, 0) | keep emp_no, is_rehired, a1 @@ -280,3 +288,60 @@ emp_no:integer | is_rehired:boolean | a1:boolean 10004 | true | true 10005 | [false,false,false,true] | false ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS still_hired=MV_SORT(VALUES(still_hired)) +; + + still_hired:boolean +[false, true] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS still_hired=MV_SORT(VALUES(still_hired)) BY first_letter +| SORT first_letter +; + +still_hired:boolean | first_letter:keyword + false | A + true | B + true | C + true | G + true | K + false | P + [false, true] | S + true | T +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS still_hired=MV_SORT(VALUES(still_hired)) BY job_positions +| SORT job_positions +; + +still_hired:boolean | job_positions:keyword + true | Accountant + true | Head Human Resources + [false, true] | Internship + true | Junior Developer + false | Principal Support Engineer + true | Purchase Manager + true | Reporting Analyst + [false, true] | Senior Python Developer + [false, true] | Senior Team Lead + true | Support Engineer + [false, true] | Tech Lead + [false, true] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64c5a7358ce22..f574722f691e5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -129,7 +129,9 @@ error_rate:double | hour:date ; -nullOnMultivaluesMathOperation#[skip:-8.13.99,reason:fixed in 8.14+] +nullOnMultivaluesMathOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:37: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -139,7 +141,9 @@ a:integer | b:integer | sum:integer ; -notNullOnMultivaluesMathOperation#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesMathOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NOT NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 1:37: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -148,7 +152,9 @@ a:integer | b:integer | sum:integer ; -nullOnMultivaluesComparisonOperation#[skip:-8.13.99,reason:fixed in 8.14+] +nullOnMultivaluesComparisonOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; a:integer | b:integer | same:boolean @@ -156,14 +162,18 @@ a:integer | b:integer | same:boolean ; -notNullOnMultivaluesComparisonOperation#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesComparisonOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; a:integer | b:integer | same:boolean ; -notNullOnMultivaluesComparisonOperationWithPartialMatch#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesComparisonOperationWithPartialMatch +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; a:integer | b:integer | same:boolean diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 4bb43146aaf0d..de7a48bcf6834 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -167,14 +167,14 @@ birth_date:date |bd:date 1964-06-02T00:00:00.000Z|1964-06-02T00:00:00.000Z ; -convertFromString +convertFromString#[skip:-8.13.99, reason: default date formatter is changed in 8.14] // tag::to_datetime-str[] ROW string = ["1953-09-02T00:00:00.000Z", "1964-06-02T00:00:00.000Z", "1964-06-02 00:00:00"] | EVAL datetime = TO_DATETIME(string) // end::to_datetime-str[] ; warning:Line 2:19: evaluation of [TO_DATETIME(string)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 2:19: java.lang.IllegalArgumentException: failed to parse date field [1964-06-02 00:00:00] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z'] +warning:Line 2:19: java.lang.IllegalArgumentException: failed to parse date field [1964-06-02 00:00:00] with format [strict_date_optional_time] // tag::to_datetime-str-result[] string:keyword |datetime:date @@ -1154,7 +1154,9 @@ FROM sample_data // end::docsNowWhere-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = ["1985-01-01T00:00:00.000Z", "1986-01-01T00:00:00.000Z", "1987-01-01T00:00:00.000Z"] | eval datetime = TO_DATETIME(a) | eval sa = mv_sort(datetime), sd = mv_sort(datetime, "DESC") diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index c97f49469fa24..ee43efa69447b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -152,22 +152,26 @@ a:keyword | a_lang:keyword ["1", "2"] | ["English", "French"] ; + enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env -| KEEP client_ip, env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip ; -client_ip:ip | env:keyword -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.0.5 | Development -172.21.2.113 | [Development, QA] -172.21.2.162 | [Development, QA] +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production ; + enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env, client_cidr @@ -185,6 +189,7 @@ client_ip:ip | env:keyword | client_cidr:ip_range 172.21.2.162 | [Development, QA] | 172.21.2.0/24 ; + enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | WHERE birth_date > "1960-01-01" @@ -205,6 +210,7 @@ birth_year:long | age_group:keyword | count:long 1960 | Senior | 8 ; + enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | WHERE birth_date IS NOT NULL @@ -219,6 +225,7 @@ count:long | age_group:keyword 12 | Middle-aged ; + enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | ENRICH heights_policy ON height WITH height_group = description @@ -235,6 +242,7 @@ Tall | 1.8 | 1.99 | 25 Very Tall | 2.0 | 2.1 | 20 ; + enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description @@ -253,6 +261,7 @@ null | 1980 | null | Radical Eighties | 4 1950 | 1980 | Nifty Fifties | Radical Eighties | 34 ; + spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM airports | WHERE abbrev == "CPH" @@ -265,6 +274,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns 
Kommune | 265 ; + spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM airports | WHERE abbrev == "CPH" @@ -277,6 +287,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 ; + spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_feature: esql.mv_warn @@ -288,7 +299,51 @@ FROM airports warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer POINT(1.396561 24.127649) | 872 | 88 | 1044 ; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json new file mode 100644 index 0000000000000..a02f98509be22 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json @@ -0,0 +1,7 @@ +{ + "match": { + "indices": "airport_city_boundaries", + "match_field": "airport", + "enrich_fields": ["city", "region", "city_boundary"] + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 7d18d2616e376..9b06e9a0a8b23 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -200,7 +200,9 @@ Chirstian. |Chirstian.Koblick|Chirstian.KoblickChirstian.|Chirstian Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. 
|Kyoichi ; -roundArrays#[skip:-8.13.99, reason:Alert order changed in 8.14] +roundArrays +required_feature: esql.disable_nullable_opts + row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:56: java.lang.IllegalArgumentException: single-value function encountered multi-value diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 44e437b9683ce..9c343083275cd 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -55,14 +55,14 @@ emp_no:integer |hire_date:date |hire_double:double 10003 |1986-08-28T00:00:00.000Z|5.255712E11 ; -convertFromString +convertFromString#[skip:-8.13.99, reason:warning changed in 8.14] // tag::to_double-str[] ROW str1 = "5.20128E11", str2 = "foo" | EVAL dbl = TO_DOUBLE("520128000000"), dbl1 = TO_DOUBLE(str1), dbl2 = TO_DOUBLE(str2) // end::to_double-str[] ; warning:Line 2:72: evaluation of [TO_DOUBLE(str2)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:72: java.lang.NumberFormatException: For input string: \"foo\" +warning:Line 2:72: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [foo] // tag::to_double-str-result[] str1:keyword |str2:keyword |dbl:double |dbl1:double |dbl2:double @@ -240,7 +240,9 @@ row a = [1.1, 2.1, 2.1] | eval da = mv_dedupe(a); [1.1, 2.1, 2.1] | [1.1, 2.1] ; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change, 0, 1) | keep emp_no, salary_change, a1 @@ -455,14 +457,18 @@ ROW deg = [90.0, 180.0, 270.0] // end::to_radians-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = [4.0, 2.0, -3.0, 2.0] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); a:double | sa:double | sd:double [4.0, 2.0, -3.0, 2.0] | [-3.0, 2.0, 2.0, 4.0] | [4.0, 2.0, 2.0, -3.0] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change, "DESC"), sa = mv_sort(salary_change) | sort emp_no @@ -481,3 +487,60 @@ emp_no:integer | salary_change:double | sa:double | sd:double 10008 | [-2.92,0.75,3.54,12.68] | [-2.92,0.75,3.54,12.68] | [12.68,3.54,0.75,-2.92] 10009 | null | null | null ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS h=MV_SORT(VALUES(height)) +; + + h:double +[1.56, 1.7, 1.78, 1.83, 1.85, 2.03, 2.05, 2.08, 2.1] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS h=MV_SORT(VALUES(height)) BY first_letter +| SORT first_letter +; + + h:double | first_letter:keyword + 1.56 | A + 2.08 | B + 1.78 | C + 2.03 | G + 2.05 | K + 1.83 | P +[1.85, 2.1] | S + 1.70 | T +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS h=MV_SORT(VALUES(height)) BY job_positions +| SORT job_positions +; + + h:double | job_positions:keyword + 2.03 | Accountant + 1.78 | Head Human Resources + [1.85, 2.1] | Internship + 2.1 | Junior Developer + 1.56 | 
Principal Support Engineer + 2.1 | Purchase Manager + 1.78 | Reporting Analyst +[1.85, 2.03, 2.1] | Senior Python Developer + [1.56, 2.08] | Senior Team Lead + 1.78 | Support Engineer + [1.56, 1.78] | Tech Lead +[1.7, 1.83, 2.05] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index deb398661aa80..8657602e7b16f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -153,14 +153,14 @@ tf:boolean |t2l:long |f2l:long |tf2l:long [true, false] |1 |0 |[1, 0] ; -convertStringToLong +convertStringToLong#[skip:-8.13.99, reason:warning changed in 8.14] // tag::to_long-str[] ROW str1 = "2147483648", str2 = "2147483648.2", str3 = "foo" | EVAL long1 = TO_LONG(str1), long2 = TO_LONG(str2), long3 = TO_LONG(str3) // end::to_long-str[] ; warning:Line 2:62: evaluation of [TO_LONG(str3)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:62: java.lang.NumberFormatException: For input string: \"foo\" +warning:Line 2:62: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [foo] // tag::to_long-str-result[] @@ -238,16 +238,16 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer 2147483647 |2147483646.2 |2147483647 |2147483646 ; -convertStringToIntFail +convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] required_feature: esql.mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:79: java.lang.NumberFormatException: For input string: \"2147483647.2\" +warning:Line 1:79: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [2147483647.2] warning:Line 1:102: evaluation of [to_integer(str2)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:102: java.lang.NumberFormatException: For input string: \"2147483648\" +warning:Line 1:102: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [2147483648] warning:Line 1:126: evaluation of [to_integer(non)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:126: java.lang.NumberFormatException: For input string: \"no number\" +warning:Line 1:126: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [no number] str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer |noi:integer 2147483647.2 |2147483648 |no number |null |null |null @@ -416,7 +416,9 @@ row a = [1, 2, 2, 3] | eval da = mv_dedupe(a); [1, 2, 2, 3] | [1, 2, 3] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + // tag::mv_sort[] ROW a = [4, 2, -3, 2] | EVAL sa = mv_sort(a), sd = mv_sort(a, "DESC") @@ -429,7 +431,9 @@ a:integer | sa:integer | sd:integer // end::mv_sort-result[] ; -mvSortEmpInt#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmpInt +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change.int, "DESC"), sa = mv_sort(salary_change.int) | sort emp_no @@ -449,7 +453,9 @@ emp_no:integer | salary_change.int:integer | sa:integer | sd:integer 10009 | null | null | null ; -mvSortEmpLong#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmpLong +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change.long, "DESC"), sa = mv_sort(salary_change.long) | sort emp_no @@ -469,7 +475,9 @@ emp_no:integer | salary_change.long:long | sa:long | sd:long 10009 | null | null | null ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + // tag::mv_slice_positive[] row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3) @@ -481,7 +489,9 @@ a:integer | a1:integer | a2:integer // end::mv_slice_positive-result[] ; -mvSliceNegativeOffset#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceNegativeOffset +required_feature: esql.mv_sort + // tag::mv_slice_negative[] row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, -2), a2 = mv_slice(a, -3, -1) @@ -493,7 +503,9 @@ a:integer | a1:integer | a2:integer // end::mv_slice_negative-result[] ; -mvSliceSingle#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceSingle +required_feature: esql.mv_sort + row a = 1 | eval a1 = mv_slice(a, 0); @@ -501,7 +513,9 @@ a:integer | a1:integer 1 | 1 ; -mvSliceOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceOutOfBound +required_feature: esql.mv_sort + row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 4), a2 = mv_slice(a, 2, 6), a3 = mv_slice(a, 4, 6); @@ -509,7 +523,9 @@ a:integer | a1:integer | a2:integer | a3:integer [1, 2, 2, 3] | null | [2, 3] | null ; -mvSliceEmpInt#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpInt +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 0, 1) | keep emp_no, salary_change.int, a1 @@ -524,7 +540,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | [-2, 13] ; -mvSliceEmpIntSingle#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntSingle +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 1) | keep emp_no, salary_change.int, a1 @@ -539,7 +557,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | 13 ; -mvSliceEmpIntEndOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntEndOutOfBound +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 1, 4) | keep emp_no, salary_change.int, a1 @@ -554,7 +574,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | 13 ; -mvSliceEmpIntOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntOutOfBound +required_feature: esql.mv_sort 
+ from employees | eval a1 = mv_slice(salary_change.int, 2, 4) | keep emp_no, salary_change.int, a1 @@ -569,7 +591,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | null ; -mvSliceEmpIntStartOutOfBoundNegative#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntStartOutOfBoundNegative +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, -5, -2) | keep emp_no, salary_change.int, a1 @@ -584,7 +608,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | -2 ; -mvSliceEmpIntOutOfBoundNegative#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntOutOfBoundNegative +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, -5, -3) | keep emp_no, salary_change.int, a1 @@ -599,7 +625,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | null ; -mvSliceEmpLong#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpLong +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.long, 0, 1) | keep emp_no, salary_change.long, a1 @@ -789,3 +817,174 @@ warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [1002000 x:long ; + +valuesLong +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.long)) +; + + l:long +[1, 2, 3, 4, 5] +; + +valuesLongGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages.long)) BY first_letter +| SORT first_letter +; + + l:long | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesLongGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.long)) BY job_positions +| SORT job_positions +; + + l:long | job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; + +valuesInt +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages)) +; + + l:integer +[1, 2, 3, 4, 5] +; + +valuesIntGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages)) BY first_letter +| SORT first_letter +; + +l:integer | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesIntGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages)) BY job_positions +| SORT job_positions +; + + l:integer | job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; + +valuesShort +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.short)) +; + + l:integer +[1, 2, 3, 4, 5] +; + +valuesShortGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 
10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages.short)) BY first_letter +| SORT first_letter +; + +l:integer | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesShortGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.short)) BY job_positions +| SORT job_positions +; + + l:integer | job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index c77c0e6747e87..09b17ed4112c9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -302,7 +302,9 @@ eth0 |127.0.0.3 eth0 |fe80::cae2:65ff:fece:fec1 ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + FROM hosts | eval sd = mv_sort(ip1, "DESC"), sa = mv_sort(ip1) | sort host desc, ip1 @@ -318,7 +320,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe82::cae2:65ff:fece:fec0, fe81::cae2:65ff:fece:feb9] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + from hosts | where host == "epsilon" | eval a1 = mv_slice(ip1, 0, 1) @@ -332,7 +336,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + from hosts | where host == "epsilon" | eval a1 = mv_slice(ip1, 0, 1) @@ -346,7 +352,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -mvZip#[skip:-8.13.99, reason:newly added in 8.14] +mvZip +required_feature: esql.mv_sort + from hosts | eval zip = mv_zip(to_string(description), to_string(ip0), "@@") | keep host, description, ip0, zip @@ -361,3 +369,45 @@ epsilon | epsilon gw instance | [fe80::cae2:65ff:fece:feb9, fe80: epsilon | [epsilon host, epsilon2 host] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [epsilon host@@fe81::cae2:65ff:fece:feb9, epsilon2 host@@fe82::cae2:65ff:fece:fec0] epsilon | null | null | null ; + +values +required_feature: esql.agg_values + + FROM hosts +| STATS ip0=MV_SORT(VALUES(ip0)) +; + + ip0:ip +[::1, 127.0.0.1, fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM hosts +| EVAL host=SUBSTRING(host, 0, 1) +| STATS ip0=MV_SORT(VALUES(ip0)) BY host +| SORT host +; + + ip0:ip | host:keyword + [::1, 127.0.0.1] | a + 127.0.0.1 | b +[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | e +fe80::cae2:65ff:fece:feb9 | g +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + 
+ FROM hosts +| STATS ip0=MV_SORT(VALUES(ip0)) BY host +| SORT host +; + + ip0:ip | host:keyword + [::1, 127.0.0.1] | alpha + 127.0.0.1 | beta +[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | epsilon +fe80::cae2:65ff:fece:feb9 | gamma +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index d94d39f0a0c81..8491919b3ee93 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1385,32 +1385,32 @@ emp_no:integer | min_plus_max:integer | are_equal:boolean ; docsAbs -//tag::docsAbs[] +//tag::abs[] ROW number = -1.0 | EVAL abs_number = ABS(number) -//end::docsAbs[] +//end::abs[] ; -//tag::docsAbs-result[] -number:double | abs_number:double --1.0 |1.0 -//end::docsAbs-result[] +//tag::abs-result[] +number:double | abs_number:double +-1.0 | 1.0 +//end::abs-result[] ; docsAbsEmployees -//tag::docsAbsEmployees[] +//tag::abs-employees[] FROM employees | KEEP first_name, last_name, height | EVAL abs_height = ABS(0.0 - height) -//end::docsAbsEmployees[] +//end::abs-employees[] | SORT first_name | LIMIT 3 ; -//tag::docsAbsEmployees-result[] +//tag::abs-employees-result[] first_name:keyword | last_name:keyword | height:double | abs_height:double Alejandro |McAlpine |1.48 |1.48 Amabile |Gomatam |2.09 |2.09 Anneke |Preusig |1.56 |1.56 -//end::docsAbsEmployees-result[] +//end::abs-employees-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec new file mode 100644 index 0000000000000..7d1617b208f34 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -0,0 +1,227 @@ +# TODO: switch this test to ``&format=csv&delimiter=|` output +metaFunctions#[skip:-8.13.99] +meta functions; + + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians." | false | false | false +asin |"double asin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians." | false | false | false +atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians." 
| false | false | false +atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate. If `null`\, the function returns `null`.", "x coordinate. If `null`\, the function returns `null`."] |double | "The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians." | [false, false] | false | false +auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false +avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false +ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false +cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false +concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." 
| [false, false] | true | false +cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false +cosh |"double cosh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true +count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false +date_extract |"long date_extract(datePart:keyword|text, date:date)" |[datePart, date] |["keyword|text", date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |"keyword date_format(?dateFormat:keyword|text, date:date)" |[dateFormat, date] |["keyword|text", date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword|text, dateString:keyword|text)"|[datePattern, dateString]|["keyword|text", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |["keyword", date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +e |double e() | null | null | null |double | "Euler’s number." 
| null | false | false +ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false +floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false +greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | false | true | false +least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | false | true | false +left |"keyword left(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["The string from which to return a substring.", "The number of characters to return."] |keyword | "Returns the substring that extracts 'length' chars from 'string' starting from the left." | [false, false] | false | false +length |"integer length(string:keyword|text)" |string |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false +log |"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a number to a base." | [true, false] | false | false +log10 |"double log10(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false +ltrim |"keyword|text ltrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false +max |"double|integer|long max(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true +median |"double|integer|long median(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." | false | false | true +median_absolute_deviation|"double|integer|long median_absolute_deviation(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long min(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true +mv_avg |"double mv_avg(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." 
| false | false | false +mv_concat |"keyword mv_concat(string:text|keyword, delim:text|keyword)" |[string, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false +mv_count |"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false +mv_median |"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." 
| false | false | false +mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[field, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." | [false, false, true] | false | false +mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false +mv_sum |"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false +mv_zip |"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" |[string1, string2, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false +now |date now() | null |null | null |date | "Returns current date and time." | null | false | false +percentile |"double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" |[number, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false +pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false +replace |"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" | [string, regex, newString] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false +right |"keyword right(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false +round |"double round(number:double, ?decimals:integer)" |[number, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. 
Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false +rtrim |"keyword|text rtrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false +sin |"double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false +sinh |"double sinh(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false +sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false +st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true +st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." | [false, false] | false | false +st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false +st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false +starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false +substring |"keyword substring(string:keyword|text, start:integer, ?length:integer)" |[string, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false +sum |"long sum(number:double|integer|long)" |number |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true +tan |"double tan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false +tanh |"double tanh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." 
| null | false | false +to_bool |"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_boolean |"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" |field |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false +to_cartesianshape |"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" |field |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false +to_datetime |"date to_datetime(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_dbl |"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_degrees |"double to_degrees(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false +to_double |"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_dt |"date to_dt(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_geopoint |"geo_point to_geopoint(field:geo_point|keyword|text)" |field |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_geoshape |"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" |field |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false +to_int |"integer to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_integer |"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_ip |"ip to_ip(field:ip|keyword|text)" |field |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false +to_long |"long to_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." 
|false |false | false +to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false +to_radians |"double to_radians(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false +to_str |"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_ul |"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_ulong |"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_unsigned_long |"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false +to_ver |"version to_ver(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +to_version |"version to_version(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +trim |"keyword|text trim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false +values |"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field |"boolean|date|double|integer|ip|keyword|long|text|version" | |"boolean|date|double|integer|ip|keyword|long|text|version" |"Collect values for a field." 
|false |false |true +; + + +metaFunctionsSynopsis#[skip:-8.13.99] +meta functions | keep synopsis; + +synopsis:keyword +"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" +"double acos(number:double|integer|long|unsigned_long)" +"double asin(number:double|integer|long|unsigned_long)" +"double atan(number:double|integer|long|unsigned_long)" +"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double avg(number:double|integer|long)" +"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" +"boolean cidr_match(ip:ip, blockX...:keyword|text)" +"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" +"keyword concat(string1:keyword|text, string2...:keyword|text)" +"double cos(number:double|integer|long|unsigned_long)" +"double cosh(number:double|integer|long|unsigned_long)" +"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" +"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" +"long date_extract(datePart:keyword|text, date:date)" +"keyword date_format(?dateFormat:keyword|text, date:date)" +"date date_parse(?datePattern:keyword|text, dateString:keyword|text)" +"date date_trunc(interval:keyword, date:date)" +double e() +"boolean ends_with(str:keyword|text, suffix:keyword|text)" +"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" +"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"keyword left(string:keyword|text, length:integer)" +"integer length(string:keyword|text)" +"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" +"double log10(number:double|integer|long|unsigned_long)" +"keyword|text ltrim(string:keyword|text)" +"double|integer|long max(number:double|integer|long)" +"double|integer|long median(number:double|integer|long)" +"double|integer|long median_absolute_deviation(number:double|integer|long)" +"double|integer|long min(number:double|integer|long)" +"double mv_avg(number:double|integer|long|unsigned_long)" +"keyword mv_concat(string:text|keyword, delim:text|keyword)" +"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version 
mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" +"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" +"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" +date now() +"double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" +double pi() +"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" +"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" +"keyword right(string:keyword|text, length:integer)" +"double round(number:double, ?decimals:integer)" +"keyword|text rtrim(string:keyword|text)" +"double sin(number:double|integer|long|unsigned_long)" +"double sinh(number:double|integer|long|unsigned_long)" +"keyword split(string:keyword|text, delim:keyword|text)" +"double sqrt(number:double|integer|long|unsigned_long)" +"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"double st_x(point:geo_point|cartesian_point)" +"double st_y(point:geo_point|cartesian_point)" +"boolean starts_with(str:keyword|text, prefix:keyword|text)" +"keyword substring(string:keyword|text, start:integer, ?length:integer)" +"long sum(number:double|integer|long)" +"double tan(number:double|integer|long|unsigned_long)" +"double tanh(number:double|integer|long|unsigned_long)" +double tau() +"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" +"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" +"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" +"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" +"date to_datetime(field:date|keyword|text|double|long|unsigned_long|integer)" +"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_degrees(number:double|integer|long|unsigned_long)" +"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"date to_dt(field:date|keyword|text|double|long|unsigned_long|integer)" +"geo_point to_geopoint(field:geo_point|keyword|text)" +"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" +"integer 
to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"ip to_ip(field:ip|keyword|text)" +"long to_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_lower(str:keyword|text)" +"double to_radians(number:double|integer|long|unsigned_long)" +"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_upper(str:keyword|text)" +"version to_ver(field:keyword|text|version)" +"version to_version(field:keyword|text|version)" +"keyword|text trim(string:keyword|text)" +"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" +; + + +metaFunctionsFiltered#[skip:-8.13.99] +META FUNCTIONS +| WHERE STARTS_WITH(name, "sin") +; + + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +sin | "double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false +sinh | "double sinh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false +; + + +// see https://github.com/elastic/elasticsearch/issues/102120 +countFunctions#[skip:-8.13.99] +meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; + +a:long | b:long | c:long +97 | 97 | 97 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec index 4e5df6c535be7..3f441c94967d5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec @@ -289,30 +289,30 @@ a:integer |b:integer |c:integer // end::in-with-expressions-result[] ; -convertMvToMvDifferentCardinality +convertMvToMvDifferentCardinality#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["1", "2", "three"] | eval ints = to_int(strings); warning:Line 1:49: evaluation of [to_int(strings)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:49: java.lang.NumberFormatException: For input string: \"three\" +warning:Line 1:49: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [three] strings:keyword |ints:integer [1, 2, three] |[1, 2] ; -convertMvToSv +convertMvToSv#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["1", "two"] | eval ints = to_int(strings); warning:Line 1:42: evaluation of [to_int(strings)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:42: java.lang.NumberFormatException: For input string: \"two\" +warning:Line 1:42: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [two] strings:keyword |ints:integer [1, two] |1 ; -convertMvToNull +convertMvToNull#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["one", "two"] | eval ints = to_int(strings); warning:Line 1:44: evaluation of [to_int(strings)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:44: java.lang.NumberFormatException: For input string: \"one\" -warning:Line 1:44: java.lang.NumberFormatException: For input string: \"two\" +warning:Line 1:44: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [one] +warning:Line 1:44: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [two] strings:keyword |ints:integer [one, two] |null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 933c106c1a85b..e7981ea209ccf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -4,231 +4,3 @@ show info | stats v = count(version); v:long 1 ; - -# TODO: switch this test to ``&format=csv&delimiter=|` output -showFunctions#[skip:-8.13.99] -show functions; - - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." | false | false | false -asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false -atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." | false | false | false -atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false -auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false -avg |"double avg(field:double|integer|long)" |field |"double|integer|long" | "" |double | "The average of a numeric field." 
| false | false | true -case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false -ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false -coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false -concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false -cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false -cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false -count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true -count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." 
| [false, true] | false | true -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false -e |double e() | null | null | null |double | "Euler’s number." | null | false | false -ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false -floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false -greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | [false, false] | true | false -least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." 
| [false, false] | true | false -left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false -length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false -log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." | [true, false] | false | false -log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false -ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false -max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true -median |"double|integer|long median(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." | false | false | true -median_absolute_deviation|"double|integer|long median_absolute_deviation(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true -min |"double|integer|long min(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true -mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false -mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false -mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false -mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." 
| false | false | false -mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false -mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false -mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false -mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false -mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false -mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[v, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." 
| [false, false, true] | false | false -mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false -mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false -mv_zip |"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" |[mvLeft, mvRight, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false -now |date now() | null |null | null |date | "Returns current date and time." | null | false | false -percentile |"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true -pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false -pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false -replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false -right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false -round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." 
| [false, true] | false | false -rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false -sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false -split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false -sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false -st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true -st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false -st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false -starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false -substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false -sum |"long sum(field:double|integer|long)" |field |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false -tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false -tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false -to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." 
|false |false | false -to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" |v |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false -to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false -to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false -to_geoshape |"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" |v |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false -to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false -to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false -to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false -to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false -to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." 
|false |false | false -to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false -to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." 
| false | false | false -; - - -showFunctionsSynopsis#[skip:-8.13.99] -show functions | keep synopsis; - -synopsis:keyword -"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" -"double acos(n:double|integer|long|unsigned_long)" -"double asin(n:double|integer|long|unsigned_long)" -"double atan(n:double|integer|long|unsigned_long)" -"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" -"double avg(field:double|integer|long)" -"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" -boolean cidr_match(ip:ip, blockX...:keyword) -"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" -"keyword concat(first:keyword|text, rest...:keyword|text)" -"double cos(n:double|integer|long|unsigned_long)" -"double cosh(n:double|integer|long|unsigned_long)" -"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" -"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -long date_extract(date_part:keyword, field:date) -keyword date_format(?format:keyword, date:date) -"date date_parse(?datePattern:keyword, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" -double e() -"boolean ends_with(str:keyword|text, suffix:keyword|text)" -"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"keyword left(str:keyword|text, length:integer)" -"integer length(str:keyword|text)" -"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" -"double log10(n:double|integer|long|unsigned_long)" -"keyword|text ltrim(str:keyword|text)" -"double|integer|long max(field:double|integer|long)" -"double|integer|long median(field:double|integer|long)" -"double|integer|long median_absolute_deviation(field:double|integer|long)" -"double|integer|long min(field:double|integer|long)" -"double mv_avg(field:double|integer|long|unsigned_long)" -"keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" 
-"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" -"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" -"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" -date now() -"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" -double pi() -"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" -"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" -"keyword right(str:keyword|text, length:integer)" -"double round(value:double, ?decimals:integer)" -"keyword|text rtrim(str:keyword|text)" -"double sin(n:double|integer|long|unsigned_long)" -"double sinh(n:double|integer|long|unsigned_long)" -"keyword split(str:keyword|text, delim:keyword|text)" -"double sqrt(n:double|integer|long|unsigned_long)" -"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" -"double st_x(point:geo_point|cartesian_point)" -"double st_y(point:geo_point|cartesian_point)" -"boolean starts_with(str:keyword|text, prefix:keyword|text)" -"keyword substring(str:keyword|text, start:integer, ?length:integer)" -"long sum(field:double|integer|long)" -"double tan(n:double|integer|long|unsigned_long)" -"double tanh(n:double|integer|long|unsigned_long)" -double tau() -"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" -"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" -"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" -"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"double to_degrees(v:double|integer|long|unsigned_long)" -"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(v:geo_point|keyword|text)" -"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" -"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"ip to_ip(v:ip|keyword|text)" -"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"keyword|text to_lower(str:keyword|text)" -"double to_radians(v:double|integer|long|unsigned_long)" -"keyword 
to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"keyword|text to_upper(str:keyword|text)" -"version to_ver(v:keyword|text|version)" -"version to_version(v:keyword|text|version)" -"keyword|text trim(str:keyword|text)" -; - - -showFunctionsFiltered#[skip:-8.12.99] -// tag::showFunctionsFiltered[] -SHOW functions -| WHERE STARTS_WITH(name, "sin") -// end::showFunctionsFiltered[] -; - -// tag::showFunctionsFiltered-result[] - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false -sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false -// end::showFunctionsFiltered-result[] -; - - -// see https://github.com/elastic/elasticsearch/issues/102120 -countFunctions#[skip:-8.13.99] -show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; - -a:long | b:long | c:long -95 | 95 | 95 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 02da586c6f357..495d0cbb8d7f0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -71,7 +71,9 @@ c:geo_point POINT(39.58327988510707 20.619513023697994) ; -centroidFromString4#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +centroidFromString4 +required_feature: esql.st_x_y + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -82,7 +84,9 @@ c:geo_point | x:double | y:double POINT(39.58327988510707 20.619513023697994) | 39.58327988510707 | 20.619513023697994 ; -stXFromString#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromString +required_feature: esql.st_x_y + // tag::st_x_y[] ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") | EVAL x = ST_X(point), y = ST_Y(point) @@ -113,7 +117,9 @@ WIIT | Bandar Lampung | POINT(105.2667 -5.45) | Indonesia ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran | POINT(60.900708564915 29.4752941956573) | Zahedan Int'l | 9 | mid ; -stXFromAirportsSupportsNull#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromAirportsSupportsNull +required_feature: esql.st_x_y + FROM airports | EVAL x = FLOOR(ABS(ST_X(city_location))/200), y = FLOOR(ABS(ST_Y(city_location))/100) | STATS c = count(*) BY x, y @@ -320,6 +326,173 @@ centroid:geo_point | count:long POINT(83.16847535921261 28.79002037679311) | 40 ; 
+centroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE country == "United Kingdom" +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +centroidFromAirportsAfterIntersectsPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE country == "United Kingdom" +| STATS centroid = ST_CENTROID(location), count=COUNT() +| EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_in_iceland = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| KEEP centroid, count, centroid_in_uk, centroid_in_iceland +; + +centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +; + +centroidFromAirportsAfterIntersectsEvalExpression#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland +| SORT count ASC +; + +centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean +POINT (-21.946634463965893 64.13187285885215) | 1 | false | true +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +POINT (0.04453958108176276 23.74658354606057) | 873 | false | false +; + +centroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (42.97109629958868 14.7552534006536) | 1 +; + +centroidFromAirportsAfterIntersectsCompoundPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | 
count:long +POINT (42.97109629958868 14.7552534006536) | 1 +; + +pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +; + +pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +// tag::st_intersects-airports[] +FROM airports +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +// end::st_intersects-airports[] +; + +// tag::st_intersects-airports-results[] +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_intersects-airports-results[] +; + +literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +intersects:boolean +true +; + +cityInCityBoundary#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| EVAL in_city = ST_INTERSECTS(city_location, city_boundary) +| STATS count=COUNT(*) BY in_city +| SORT count ASC +| EVAL cardinality = CASE(count < 10, "very few", count < 100, "few", "many") +| KEEP cardinality, in_city +; + +cardinality:k | in_city:boolean +"few" | false +"many" | true +; + +cityNotInCityBoundaryBiggest#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| WHERE NOT ST_INTERSECTS(city_location, city_boundary) +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| SORT boundary_wkt_length DESC +| KEEP abbrev, airport, city, city_location, boundary_wkt_length, city_boundary +| LIMIT 1 +; + +abbrev:keyword | airport:text | city:keyword | city_location:geo_point | boundary_wkt_length:integer | city_boundary:geo_shape +SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.2533) | 598 | POLYGON((109.1802 18.4609, 109.2304 18.4483, 109.2311 18.4261, 109.2696 18.411, 109.2602 18.3581, 109.2273 18.348, 109.2286 18.2638, 109.2842 18.2665, 109.3518 18.2166, 109.4508 18.1936, 109.4895 18.2281, 109.5137 18.2283, 109.4914 18.2781, 109.5041 18.2948, 109.4809 18.3034, 109.5029 18.3422, 109.5249 18.3375, 109.4993 18.3632, 109.535 18.4007, 109.5104 18.4374, 109.5231 18.4474, 109.5321 18.53, 109.4992 18.5568, 109.4192 18.5646, 109.4029 
18.6302, 109.3286 18.5772, 109.309 18.5191, 109.2913 18.5141, 109.2434 18.5607, 109.2022 18.5572, 109.1815 18.5163, 109.1908 18.4711, 109.1802 18.4609))) +; + +airportCityLocationPointIntersection#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_mp +| WHERE ST_INTERSECTS(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointIntersectionCentroid#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_mp +| WHERE ST_INTERSECTS(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + geoPointEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -437,7 +610,9 @@ c:cartesian_point POINT(3949.163965353159 1078.2645465797348) ; -stXFromCartesianString#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromCartesianString +required_feature: esql.st_x_y + ROW point = TO_CARTESIANPOINT("POINT(4297.10986328125 -1475.530029296875)") | EVAL x = ST_X(point), y = ST_Y(point) ; @@ -534,6 +709,37 @@ centroid:cartesian_point | count:long POINT (726480.0130685265 3359566.331716279) | 849 ; +cartesianCentroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + cartesianPointEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 7209812e0569c..69e56c7efe55d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -54,7 +54,7 @@ abbrev:keyword | name:text | location:geo_shape | cou "VLC" | "Valencia" | POINT(-0.473474930771676 39.4914597884489) | "Spain" | "Paterna" | POINT(-0.4406 39.5028) ; 
-simpleLoadFromCityBoundaries#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoadFromCityBoundaries#[skip:-8.13.99, reason:chunked CSV import support added in 8.14] FROM airport_city_boundaries | WHERE abbrev == "CPH" | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) @@ -66,8 +66,121 @@ abbrev:keyword | region:text | city_location:geo_point | airport:tex CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen | 265 ; -geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL location = TO_GEOSHAPE(location) +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +| KEEP abbrev, name, location, country, city, city_location +; + +abbrev:keyword | name:text | location:geo_shape | country:keyword | city:keyword | city_location:geo_point +HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) +; + +polygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) +| KEEP abbrev, airport, region, city, city_location +| LIMIT 1 +; + +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +; + +pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL location = TO_GEOSHAPE(location) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +| KEEP abbrev, name, location, country, city, city_location +; + +abbrev:keyword | name:text | location:geo_shape | country:keyword | city:keyword | city_location:geo_point +HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) +; + +literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointAsShapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointAsShapeIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; 
+wkt:keyword | pt:geo_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +shapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM countries_bbox +| WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:geo_shape +ZAF | South Africa | BBOX(16.483327, 37.892218, -22.136391, -46.969727) +SWZ | Swaziland | BBOX(30.798336, 32.133400, -25.728336, -27.316391) +LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) +; + +literalPolygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] +| EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") +| MV_EXPAND wkt +| EVAL shape = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(shape, other) +| KEEP wkt, shape, other +; + +wkt:keyword | shape:geo_shape | other:geo_shape +"POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))" | POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60)) | POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64)) +; + +literalPolygonIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) +; + +intersects:boolean +true +; + +geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -162,6 +275,53 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart "VLC" | "Valencia" | 8 | "mid" | POINT(-52706.98819688343 4792315.469321795) ; +cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| EVAL location = TO_CARTESIANSHAPE(location) +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| KEEP abbrev, name, location, scalerank, type +; + +abbrev:keyword | name:text | location:cartesian_shape | scalerank:i | type:k +HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | 9 | mid +; + +literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANSHAPE(wkt) +| WHERE ST_INTERSECTS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +cartesianShapeIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM countries_bbox_web +| WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +SWZ | Swaziland | BBOX(3428455.080322901, 3577073.7249586442, -2965472.9128583763, -3163056.5390926218) +LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117.2692412077, -3587446.106149188) +; + +literalCartesianPolygonIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] 
+ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] +| MV_EXPAND wkt +| EVAL shape = TO_CARTESIANSHAPE(wkt) +| EVAL other = TO_CARTESIANSHAPE("POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400))") +| WHERE ST_INTERSECTS(shape, other) +; + +wkt:keyword | shape:cartesian_shape | other:cartesian_shape +"POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) +; + cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 4aff4c689c077..91c79e64b2385 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1217,3 +1217,108 @@ FROM airports c:l 891 ; + +countMV#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS vals = COUNT(salary_change.int) +; + +vals:l +183 +; + +sumOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = sum(1), s2point1 = sum(2.1), s_mv = sum([-1, 0, 3]) * 3, s_null = sum(null), rows = count(*) +; + +s1:l | s2point1:d | s_mv:l | s_null:d | rows:l +100 | 210.0 | 600 | null | 100 +; + +sumOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = round(sum(2.1), 1), s_mv = sum([-1, 0, 3]), rows = count(*) by languages +| SORT languages +; + +s2point1:d | s_mv:l | rows:l | languages:i +31.5 | 30 | 15 | 1 +39.9 | 38 | 19 | 2 +35.7 | 34 | 17 | 3 +37.8 | 36 | 18 | 4 +44.1 | 42 | 21 | 5 +21.0 | 20 | 10 | null +; + +avgOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = avg(1), s_mv = avg([-1, 0, 3]) * 3, s_null = avg(null) +; + +s1:d | s_mv:d | s_null:d +1.0 | 2.0 | null +; + +avgOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = avg(2.1), s_mv = avg([-1, 0, 3]) * 3 by languages +| SORT languages +; + +s2point1:d | s_mv:d | languages:i +2.1 | 2.0 | 1 +2.1 | 2.0 | 2 +2.1 | 2.0 | 3 +2.1 | 2.0 | 4 +2.1 | 2.0 | 5 +2.1 | 2.0 | null +; + +minOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = min(1), s_mv = min([-1, 0, 3]), s_null = min(null) +; + +s1:i | s_mv:i | s_null:null +1 | -1 | null +; + +minOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = min(2.1), s_mv = min([-1, 0, 3]) by languages +| SORT languages +; + +s2point1:d | s_mv:i | languages:i +2.1 | -1 | 1 +2.1 | -1 | 2 +2.1 | -1 | 3 +2.1 | -1 | 4 +2.1 | -1 | 5 +2.1 | -1 | null +; + +maxOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = max(1), s_mv = max([-1, 0, 3]), s_null = max(null) +; + +s1:i | s_mv:i | s_null:null +1 | 3 | null +; + +maxOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = max(2.1), s_mv = max([-1, 0, 3]) by languages +| SORT languages +; + +s2point1:d | s_mv:i | languages:i +2.1 | 3 | 1 +2.1 | 3 | 2 +2.1 | 3 | 3 +2.1 | 3 | 4 +2.1 | 3 | 5 +2.1 | 3 | null +; diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5a44f16dd60af..d9c9e535c2c45 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -714,14 +714,18 @@ ROW a=[10, 9, 8] // end::mv_concat-to_string-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = ["Mon", "Tues", "Wed", "Thu", "Fri"] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); a:keyword | sa:keyword | sd:keyword ["Mon", "Tues", "Wed", "Thu", "Fri"] | [Fri, Mon, Thu, Tues, Wed] | [Wed, Tues, Thu, Mon, Fri] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(job_positions, "DESC"), sa = mv_sort(job_positions) | sort emp_no @@ -737,7 +741,9 @@ emp_no:integer | job_positions:keyword 10005 | null | null | null ; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.keyword, 0, 1) | keep emp_no, salary_change.keyword, a1 @@ -752,7 +758,9 @@ emp_no:integer | salary_change.keyword:keyword | a1:keyword 10005 | [-2.14,13.07] | [-2.14,13.07] ; -mvZip#[skip:-8.13.99, reason:newly added in 8.14] +mvZip +required_feature: esql.mv_sort + // tag::mv_zip[] ROW a = ["x", "y", "z"], b = ["1", "2"] | EVAL c = mv_zip(a, b, "-") @@ -766,7 +774,9 @@ a:keyword | b:keyword | c:keyword // end::mv_zip-result[] ; -mvZipEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvZipEmp +required_feature: esql.mv_sort + from employees | eval full_name = mv_zip(first_name, last_name, " "), full_name_2 = mv_zip(last_name, first_name), jobs = mv_zip(job_positions, salary_change.keyword, "#") | keep emp_no, full_name, full_name_2, job_positions, salary_change.keyword, jobs @@ -1098,3 +1108,63 @@ row a = "π/2 + a + B + Λ ºC" | eval lower = to_lower(a), upper = to_upper(a) a:keyword | upper:keyword | lower:keyword π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS first_name=MV_SORT(VALUES(first_name)) +; + + first_name:keyword +[Anneke, Bezalel, Chirstian, Georgi, Kyoichi, Parto, Saniya, Sumant, Tzvetan] +; + +valuesGrouped +required_feature: esql.agg_values + +// tag::values-grouped[] + FROM employees +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS first_name=MV_SORT(VALUES(first_name)) BY first_letter +| SORT first_letter +// end::values-grouped[] +; + +// tag::values-grouped-result[] + first_name:keyword | first_letter:keyword + [Alejandro, Amabile, Anneke, Anoosh, Arumugam] | A + [Basil, Berhard, Berni, Bezalel, Bojan, Breannda, Brendon] | B + [Charlene, Chirstian, Claudi, Cristinel] | C + [Danel, Divier, Domenick, Duangkaew] | D + [Ebbe, Eberhardt, Erez] | E + Florian | F + [Gao, Georgi, Georgy, Gino, Guoxiang] | G + [Heping, Hidefumi, Hilari, Hironobu, Hironoby, Hisao] | H + [Jayson, Jungsoon] | J + [Kazuhide, Kazuhito, Kendra, Kenroku, Kshitij, Kwee, Kyoichi] | K + [Lillian, Lucien] | L + [Magy, Margareta, Mary, Mayuko, Mayumi, Mingsen, Mokhtar, Mona, Moss] | M + Otmar | O + [Parto, Parviz, Patricio, Prasadram, Premal] | P + [Ramzi, Remzi, Reuven] | R +[Sailaja, Saniya, Sanjiv, Satosi, Shahaf, Shir, Somnath, Sreekrishna, Sudharsan, Sumant, Suzette] | S + [Tse, Tuval, Tzvetan] | T + [Udi, Uri] | U + 
[Valdiodio, Valter, Vishv] | V + Weiyi | W + Xinglin | X + [Yinghua, Yishay, Yongqiao] | Y + [Zhongwei, Zvonko] | Z + null | null +// end::values-grouped-result[] +; + + +splitBasedOnField +from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | keep gender, split; + +gender:keyword | split:keyword +M | [foo, bar] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index df1fa6e67f279..c5e42186d976f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -310,3 +310,63 @@ v:v | version:v |version_text:s | id:i | m:i | g:v | i:v | c null | null | null | 13 | 0 | 1.3.0 | 0.1 | none null | null | null | 11 | 0 | 1.3.0 | 0.1 | none ; + +values +required_feature: esql.agg_values + + FROM apps +| STATS version=MV_SORT(VALUES(version)) +; + + version:version +[1, 1.2.3.4, 1.11.0, 2.1, 2.3.4, 2.12.0, 5.2.9-SNAPSHOT, 5.2.9, bad] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM apps +| EVAL name=SUBSTRING(name, 0, 1) +| STATS version=MV_SORT(VALUES(version)) BY name +| SORT name +; + +version:version | name:keyword + [1, 1.2.3.4] | a + 2.1 | b + 2.3.4 | c + 2.12.0 | d + 1.11.0 | e + 5.2.9 | f + 5.2.9-SNAPSHOT | g + 1.2.3.4 | h + bad | i + 5.2.9 | j + null | k + null | l + 5.2.9 | m +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM apps +| STATS version=MV_SORT(VALUES(version)) BY name +| SORT name +; + +version:version | name:keyword + [1, 1.2.3.4] | aaaaa + 2.1 | bbbbb + 2.3.4 | ccccc + 2.12.0 | ddddd + 1.11.0 | eeeee + 5.2.9 | fffff + 5.2.9-SNAPSHOT | ggggg + 1.2.3.4 | hhhhh + bad | iiiii + 5.2.9 | jjjjj + null | kkkkk + null | lllll + 5.2.9 | mmmmm +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 115ae54833e14..17082e9855761 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -1038,8 +1038,8 @@ public void testShowInfo() { } } - public void testShowFunctions() { - try (EsqlQueryResponse results = run("show functions")) { + public void testMetaFunctions() { + try (EsqlQueryResponse results = run("meta functions")) { assertThat( results.columns(), equalTo( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 3728eb624aaa0..23fa3f862a3ff 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -367,6 +367,9 @@ protected void doRun() throws Exception { request.pragmas(randomPragmas()); PlainActionFuture future = new PlainActionFuture<>(); client.execute(EsqlQueryAction.INSTANCE, request, future); + ExchangeService exchangeService = internalCluster().getInstance(ExchangeService.class, dataNode); + boolean waitedForPages; + final String sessionId; try { List foundTasks = new ArrayList<>(); assertBusy(() -> { @@ -380,12 +383,12 @@ 
protected void doRun() throws Exception { assertThat(tasks, hasSize(1)); foundTasks.addAll(tasks); }); - String sessionId = foundTasks.get(0).taskId().toString(); - ExchangeService exchangeService = internalCluster().getInstance(ExchangeService.class, dataNode); + sessionId = foundTasks.get(0).taskId().toString(); assertTrue(fetchingStarted.await(1, TimeUnit.MINUTES)); ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(sessionId); - if (randomBoolean()) { - // do not fail exchange requests when we have some pages + waitedForPages = randomBoolean(); + if (waitedForPages) { + // do not fail exchange requests until we have some pages assertBusy(() -> assertThat(exchangeSink.bufferSize(), greaterThan(0))); } } finally { @@ -393,6 +396,12 @@ protected void doRun() throws Exception { } Exception failure = expectThrows(Exception.class, () -> future.actionGet().close()); assertThat(failure.getMessage(), containsString("failed to fetch pages")); + // If we proceed without waiting for pages, we might cancel the main request before starting the data-node request. + // As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is + // longer than the assertBusy timeout. + if (waitedForPages == false) { + exchangeService.finishSinkHandler(sessionId, failure); + } } finally { transportService.clearAllRules(); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 9aeeb10da2ad9..406361438fc42 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -16,7 +16,7 @@ public class TimeSeriesIT extends AbstractEsqlIntegTestCase { @Override protected EsqlQueryResponse run(EsqlQueryRequest request) { - assertTrue("timseries requires pragmas", canUseQueryPragmas()); + assumeTrue("timseries requires pragmas", canUseQueryPragmas()); var settings = Settings.builder().put(request.pragmas().getSettings()).put(QueryPragmas.TIME_SERIES_MODE.getKey(), "true").build(); request.pragmas(new QueryPragmas(settings)); return super.run(request); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index ee2d449b21184..bc21a60a76ed8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -10,6 +10,7 @@ GROK : 'grok' -> pushMode(EXPRESSION_MODE); INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); +META : 'meta' -> pushMode(META_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -364,13 +365,12 @@ MVEXPAND_WS ; // -// SHOW INFO +// SHOW commands // mode SHOW_MODE; SHOW_PIPE : PIPE -> type(PIPE), popMode; INFO : 'info'; -FUNCTIONS : 'functions'; SHOW_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -384,6 +384,26 @@ SHOW_WS : WS -> channel(HIDDEN) ; +// +// META commands +// +mode META_MODE; +META_PIPE : PIPE -> type(PIPE), popMode; + +FUNCTIONS : 'functions'; + +META_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +META_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +META_WS + : WS -> 
channel(HIDDEN) + ; + mode SETTING_MODE; SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 4bf3584737d1d..5edc646fad10e 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -8,100 +8,104 @@ GROK=7 INLINESTATS=8 KEEP=9 LIMIT=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -UNKNOWN_CMD=18 -LINE_COMMENT=19 -MULTILINE_COMMENT=20 -WS=21 -EXPLAIN_WS=22 -EXPLAIN_LINE_COMMENT=23 -EXPLAIN_MULTILINE_COMMENT=24 -PIPE=25 -STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -LAST=38 -LP=39 -IN=40 -IS=41 -LIKE=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -OPENING_BRACKET=63 -CLOSING_BRACKET=64 -UNQUOTED_IDENTIFIER=65 -QUOTED_IDENTIFIER=66 -EXPR_LINE_COMMENT=67 -EXPR_MULTILINE_COMMENT=68 -EXPR_WS=69 -METADATA=70 -FROM_UNQUOTED_IDENTIFIER=71 -FROM_LINE_COMMENT=72 -FROM_MULTILINE_COMMENT=73 -FROM_WS=74 -ID_PATTERN=75 -PROJECT_LINE_COMMENT=76 -PROJECT_MULTILINE_COMMENT=77 -PROJECT_WS=78 -AS=79 -RENAME_LINE_COMMENT=80 -RENAME_MULTILINE_COMMENT=81 -RENAME_WS=82 -ON=83 -WITH=84 -ENRICH_POLICY_NAME=85 -ENRICH_LINE_COMMENT=86 -ENRICH_MULTILINE_COMMENT=87 -ENRICH_WS=88 -ENRICH_FIELD_LINE_COMMENT=89 -ENRICH_FIELD_MULTILINE_COMMENT=90 -ENRICH_FIELD_WS=91 -MVEXPAND_LINE_COMMENT=92 -MVEXPAND_MULTILINE_COMMENT=93 -MVEXPAND_WS=94 -INFO=95 -FUNCTIONS=96 +META=11 +MV_EXPAND=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 +UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 SHOW_LINE_COMMENT=97 SHOW_MULTILINE_COMMENT=98 SHOW_WS=99 -COLON=100 -SETTING=101 -SETTING_LINE_COMMENT=102 -SETTTING_MULTILINE_COMMENT=103 -SETTING_WS=104 +FUNCTIONS=100 +META_LINE_COMMENT=101 +META_MULTILINE_COMMENT=102 +META_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 'dissect'=1 'drop'=2 'enrich'=3 @@ -112,53 +116,54 @@ SETTING_WS=104 'inlinestats'=8 'keep'=9 'limit'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -','=33 
-'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'last'=38 -'('=39 -'in'=40 -'is'=41 -'like'=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -']'=64 -'metadata'=70 -'as'=79 -'on'=83 -'with'=84 -'info'=95 -'functions'=96 -':'=100 +'meta'=11 +'mv_expand'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index b34b9bb103b83..a7d0097b6aec8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -24,6 +24,7 @@ sourceCommand | fromCommand | rowCommand | showCommand + | metaCommand ; processingCommand @@ -244,7 +245,10 @@ subqueryExpression showCommand : SHOW INFO #showInfo - | SHOW FUNCTIONS #showFunctions + ; + +metaCommand + : META FUNCTIONS #metaFunctions ; enrichCommand diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 4bf3584737d1d..5edc646fad10e 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -8,100 +8,104 @@ GROK=7 INLINESTATS=8 KEEP=9 LIMIT=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -UNKNOWN_CMD=18 -LINE_COMMENT=19 -MULTILINE_COMMENT=20 -WS=21 -EXPLAIN_WS=22 -EXPLAIN_LINE_COMMENT=23 -EXPLAIN_MULTILINE_COMMENT=24 -PIPE=25 -STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -LAST=38 -LP=39 -IN=40 -IS=41 -LIKE=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -OPENING_BRACKET=63 -CLOSING_BRACKET=64 -UNQUOTED_IDENTIFIER=65 -QUOTED_IDENTIFIER=66 -EXPR_LINE_COMMENT=67 -EXPR_MULTILINE_COMMENT=68 -EXPR_WS=69 -METADATA=70 -FROM_UNQUOTED_IDENTIFIER=71 -FROM_LINE_COMMENT=72 -FROM_MULTILINE_COMMENT=73 -FROM_WS=74 -ID_PATTERN=75 -PROJECT_LINE_COMMENT=76 -PROJECT_MULTILINE_COMMENT=77 -PROJECT_WS=78 -AS=79 -RENAME_LINE_COMMENT=80 -RENAME_MULTILINE_COMMENT=81 -RENAME_WS=82 -ON=83 -WITH=84 -ENRICH_POLICY_NAME=85 -ENRICH_LINE_COMMENT=86 -ENRICH_MULTILINE_COMMENT=87 -ENRICH_WS=88 -ENRICH_FIELD_LINE_COMMENT=89 -ENRICH_FIELD_MULTILINE_COMMENT=90 -ENRICH_FIELD_WS=91 -MVEXPAND_LINE_COMMENT=92 -MVEXPAND_MULTILINE_COMMENT=93 -MVEXPAND_WS=94 -INFO=95 -FUNCTIONS=96 +META=11 +MV_EXPAND=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 +UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 
+GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 SHOW_LINE_COMMENT=97 SHOW_MULTILINE_COMMENT=98 SHOW_WS=99 -COLON=100 -SETTING=101 -SETTING_LINE_COMMENT=102 -SETTTING_MULTILINE_COMMENT=103 -SETTING_WS=104 +FUNCTIONS=100 +META_LINE_COMMENT=101 +META_MULTILINE_COMMENT=102 +META_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 'dissect'=1 'drop'=2 'enrich'=3 @@ -112,53 +116,54 @@ SETTING_WS=104 'inlinestats'=8 'keep'=9 'limit'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -','=33 -'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'last'=38 -'('=39 -'in'=40 -'is'=41 -'like'=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -']'=64 -'metadata'=70 -'as'=79 -'on'=83 -'with'=84 -'info'=95 -'functions'=96 -':'=100 +'meta'=11 +'mv_expand'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index b1fc80b9260ad..209b15ef21a2f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -4,7 +4,6 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -15,6 +14,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendDouble(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -84,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendDouble(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index 600fa293394f9..ef91bf890cd23 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -4,7 +4,6 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -41,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -50,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p, scratchPad)); - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -85,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java index e0eca6b6bcbff..0d7a2cb9d7459 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java @@ -4,7 +4,6 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -15,6 +14,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -84,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java deleted file mode 100644 index 1232e0dda7c0f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import java.lang.ArithmeticException; -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. - * This class is generated. Do not edit it. 
- */ -public final class PowIntEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator base; - - private final EvalOperator.ExpressionEvaluator exponent; - - private final DriverContext driverContext; - - public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.base = base; - this.exponent = exponent; - this.driverContext = driverContext; - } - - @Override - public Block.Ref eval(Page page) { - try (Block.Ref baseRef = base.eval(page)) { - DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); - try (Block.Ref exponentRef = exponent.eval(page)) { - DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); - } - } - } - - public IntBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (baseBlock.getValueCount(p) != 1) { - if (baseBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (exponentBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (exponentBlock.getValueCount(p) != 1) { - if (exponentBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(Pow.processInt(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(Pow.processInt(baseVector.getDouble(p), exponentVector.getDouble(p))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(base, exponent); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory base; - - private final EvalOperator.ExpressionEvaluator.Factory exponent; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, - 
EvalOperator.ExpressionEvaluator.Factory exponent) { - this.source = source; - this.base = base; - this.exponent = exponent; - } - - @Override - public PowIntEvaluator get(DriverContext context) { - return new PowIntEvaluator(source, base.get(context), exponent.get(context), context); - } - - @Override - public String toString() { - return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java deleted file mode 100644 index bd2e5f5e10ec2..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import java.lang.ArithmeticException; -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. - * This class is generated. Do not edit it. 
- */ -public final class PowLongEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator base; - - private final EvalOperator.ExpressionEvaluator exponent; - - private final DriverContext driverContext; - - public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.base = base; - this.exponent = exponent; - this.driverContext = driverContext; - } - - @Override - public Block.Ref eval(Page page) { - try (Block.Ref baseRef = base.eval(page)) { - DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); - try (Block.Ref exponentRef = exponent.eval(page)) { - DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); - } - } - } - - public LongBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (baseBlock.getValueCount(p) != 1) { - if (baseBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (exponentBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (exponentBlock.getValueCount(p) != 1) { - if (exponentBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendLong(Pow.processLong(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public LongBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendLong(Pow.processLong(baseVector.getDouble(p), exponentVector.getDouble(p))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(base, exponent); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory base; - - private final EvalOperator.ExpressionEvaluator.Factory exponent; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, - 
EvalOperator.ExpressionEvaluator.Factory exponent) { - this.source = source; - this.base = base; - this.exponent = exponent; - } - - @Override - public PowLongEvaluator get(DriverContext context) { - return new PowLongEvaluator(source, base.get(context), exponent.get(context), context); - } - - @Override - public String toString() { - return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..e32357c42bf71 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..7bf47b766bd95 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..979869dc86c56 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndConstantEvaluator[" + 
"leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..6c47745d6af37 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return 
"SpatialIntersectsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..8d87884d04077 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..45e9daf5bc453 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, 
rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..f043ff4104bbb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + 
rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..9f5f1c7cc9674 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. + */ +public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java similarity index 72% rename from x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java rename to x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index bdb8bfd0f613a..fb95bbc1acef9 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -2,13 +2,13 @@ // or more contributor license agreements. Licensed under the Elastic License // 2.0; you may not use this file except in compliance with the Elastic License // 2.0. -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -22,22 +22,25 @@ import org.elasticsearch.xpack.ql.tree.Source; /** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RegexMatch}. + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link AutomataMatch}. * This class is generated. Do not edit it. */ -public final class RegexMatchEvaluator implements EvalOperator.ExpressionEvaluator { +public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Warnings warnings; private final EvalOperator.ExpressionEvaluator input; - private final CharacterRunAutomaton pattern; + private final ByteRunAutomaton automaton; + + private final String pattern; private final DriverContext driverContext; - public RegexMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, - CharacterRunAutomaton pattern, DriverContext driverContext) { + public AutomataMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, + ByteRunAutomaton automaton, String pattern, DriverContext driverContext) { this.warnings = new Warnings(source); this.input = input; + this.automaton = automaton; this.pattern = pattern; this.driverContext = driverContext; } @@ -68,7 +71,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { result.appendNull(); continue position; } - result.appendBoolean(RegexMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), pattern)); + result.appendBoolean(AutomataMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), automaton, pattern)); } return result.build(); } @@ -78,7 +81,7 @@ public BooleanVector eval(int positionCount, BytesRefVector inputVector) { try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(RegexMatch.process(inputVector.getBytesRef(p, inputScratch), pattern)); + result.appendBoolean(AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); } return result.build(); } @@ -86,7 +89,7 @@ public BooleanVector eval(int positionCount, BytesRefVector inputVector) { @Override public String toString() { - return "RegexMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; + return "AutomataMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; } @Override @@ -99,23 +102,26 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private 
final EvalOperator.ExpressionEvaluator.Factory input; - private final CharacterRunAutomaton pattern; + private final ByteRunAutomaton automaton; + + private final String pattern; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory input, - CharacterRunAutomaton pattern) { + ByteRunAutomaton automaton, String pattern) { this.source = source; this.input = input; + this.automaton = automaton; this.pattern = pattern; } @Override - public RegexMatchEvaluator get(DriverContext context) { - return new RegexMatchEvaluator(source, input.get(context), pattern, context); + public AutomataMatchEvaluator get(DriverContext context) { + return new AutomataMatchEvaluator(source, input.get(context), automaton, pattern, context); } @Override public String toString() { - return "RegexMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; + return "AutomataMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 176b89f80c910..5488efda7834f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -17,21 +17,20 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.lucene.UnsupportedValueSource; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; -import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; abstract class PositionToXContent { protected final Block block; @@ -109,7 +108,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { BytesRef val = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch); - return builder.value(DocValueFormat.IP.format(val)); + return builder.value(ipToString(val)); } }; case "date" -> new PositionToXContent(block) { @@ -117,21 +116,14 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { long longVal = ((LongBlock) block).getLong(valueIndex); - return 
builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); + return builder.value(dateTimeToString(longVal)); } }; - case "geo_point", "geo_shape" -> new PositionToXContent(block) { + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - return builder.value(GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); - } - }; - case "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { - @Override - protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) - throws IOException { - return builder.value(CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(spatialToString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; case "boolean" -> new PositionToXContent(block) { @@ -146,7 +138,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { BytesRef val = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch); - return builder.value(new Version(val).toString()); + return builder.value(versionToString(val)); } }; case "null" -> new PositionToXContent(block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index f407d5c4b5e6e..f467512fd6c0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -21,7 +21,6 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.UnsupportedValueSource; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -30,7 +29,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; import java.io.UncheckedIOException; @@ -39,12 +37,16 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; /** * Collection of static utility methods for helping transform response data between pages and values. @@ -127,16 +129,17 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef case "keyword", "text" -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); case "ip" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); - yield DocValueFormat.IP.format(val); + yield ipToString(val); } case "date" -> { long longVal = ((LongBlock) block).getLong(offset); - yield UTC_DATE_TIME_FORMATTER.formatMillis(longVal); + yield dateTimeToString(longVal); } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); - case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point", "geo_shape" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); - case "cartesian_point", "cartesian_shape" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "version" -> versionToString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> spatialToString( + ((BytesRefBlock) block).getBytesRef(offset, scratch) + ); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -168,21 +171,23 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li var builder = results.get(c); var value = row.get(c); switch (dataTypes.get(c)) { - case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong(asLongUnsigned(((Number) value).longValue())); + case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong( + longToUnsignedLong(((Number) value).longValue(), true) + ); case "long" -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); case "integer" -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); case "double" -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); case "keyword", "text", "unsupported" -> ((BytesRefBlock.Builder) builder).appendBytesRef( new BytesRef(value.toString()) ); - case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(parseIP(value.toString())); + case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); case "date" -> { - long longVal = UTC_DATE_TIME_FORMATTER.parseMillis(value.toString()); + long longVal = dateTimeToLong(value.toString()); ((LongBlock.Builder) builder).appendLong(longVal); } case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); case "null" -> builder.appendNull(); - case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(value.toString()).toBytesRef()); + case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToVersion(new BytesRef(value.toString()))); case "_source" -> { @SuppressWarnings("unchecked") Map o = (Map) value; @@ -195,14 +200,9 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li throw new UncheckedIOException(e); } } - case "geo_point", "geo_shape" -> { 
- // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = GEO.wktToWkb(value.toString()); - ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); - } - case "cartesian_point", "cartesian_shape" -> { + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = CARTESIAN.wktToWkb(value.toString()); + BytesRef wkb = stringToSpatial(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 3540aa83638a1..14344502f165a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; @@ -83,6 +82,7 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; @@ -94,6 +94,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.NESTED; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; public class Analyzer extends ParameterizedRuleExecutor { // marker list of attributes for plans that do not have any concrete fields to return, but have other computed columns to return @@ -605,7 +606,7 @@ private LogicalPlan resolveEnrich(Enrich enrich, List childrenOutput) } private static final DataType[] GEO_TYPES = new DataType[] { GEO_POINT, GEO_SHAPE }; - private static final DataType[] NON_GEO_TYPES = new DataType[] { KEYWORD, IP, LONG, INTEGER, FLOAT, DOUBLE, DATETIME }; + private static final DataType[] NON_GEO_TYPES = new DataType[] { KEYWORD, TEXT, IP, LONG, INTEGER, FLOAT, DOUBLE, DATETIME }; private DataType[] allowedEnrichTypes(String matchType) { return matchType.equals(GEO_MATCH_TYPE) ? GEO_TYPES : NON_GEO_TYPES; @@ -773,7 +774,7 @@ private static Expression stringToDate(Expression stringExpression) { Long millis = null; // TODO: better control over this string format - do we want this to be flexible or always redirect folks to use date parsing try { - millis = str == null ? null : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(str); + millis = str == null ? 
null : dateTimeToLong(str); } catch (Exception ex) { // in case of exception, millis will be null which will trigger an error } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java index 3039196a75f24..29371d81304f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; -import java.net.InetAddress; import java.util.ArrayList; import java.util.List; import java.util.function.IntFunction; @@ -129,14 +128,13 @@ private IntFunction blockToJavaObject() { BytesRefBlock bytesRefBlock = (BytesRefBlock) block; if (inputDataType == IP) { yield offset -> { - bytesRefBlock.getBytesRef(offset, scratch); - if (ipBytes.length != scratch.length) { + final var bytes = bytesRefBlock.getBytesRef(offset, scratch); + if (ipBytes.length != bytes.length) { // Lucene only support 16-byte IP addresses, even IPv4 is encoded in 16 bytes - throw new IllegalStateException("Cannot decode IP field from bytes of length " + scratch.length); + throw new IllegalStateException("Cannot decode IP field from bytes of length " + bytes.length); } - System.arraycopy(scratch.bytes, scratch.offset, ipBytes, 0, scratch.length); - InetAddress ip = InetAddressPoint.decode(ipBytes); - return ip; + System.arraycopy(bytes.bytes, bytes.offset, ipBytes, 0, bytes.length); + return InetAddressPoint.decode(ipBytes); }; } yield offset -> bytesRefBlock.getBytesRef(offset, new BytesRef()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 1087e9d33b805..c26f722d9f765 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.ComparisonMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEqualsMapper; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMapper; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -48,7 +47,6 @@ public final class EvalMapper { ComparisonMapper.LESS_THAN, ComparisonMapper.LESS_THAN_OR_EQUAL, InMapper.IN_MAPPER, - RegexMapper.REGEX_MATCH, new InsensitiveEqualsMapper(), new BooleanLogic(), new Nots(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java deleted file mode 100644 index f37751e18858f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; - -import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.xpack.esql.evaluator.EvalMapper; -import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; -import org.elasticsearch.xpack.esql.planner.Layout; -import org.elasticsearch.xpack.ql.expression.predicate.regex.AbstractStringPattern; -import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; - -public abstract class RegexMapper extends ExpressionMapper> { - - public static final ExpressionMapper REGEX_MATCH = new RegexMapper() { - @Override - public ExpressionEvaluator.Factory map(RegexMatch expression, Layout layout) { - return dvrCtx -> new RegexMatchEvaluator( - expression.source(), - EvalMapper.toEvaluator(expression.field(), layout).get(dvrCtx), - new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()), - dvrCtx - ); - } - }; -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java deleted file mode 100644 index ac1bf2031750f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; - -public class RegexMatch { - @Evaluator - static boolean process(BytesRef input, @Fixed CharacterRunAutomaton pattern) { - if (input == null) { - return false; - } - return pattern.run(input.utf8ToString()); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index e774ba36b16e6..85d5357d7c1ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -11,17 +11,31 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; import java.util.Locale; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; public class EsqlTypeResolutions { + public static Expression.TypeResolution isStringAndExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + Expression.TypeResolution resolution = TypeResolutions.isString(e, operationName, paramOrd); + if (resolution.unresolved()) { + return resolution; + } + + return isExact(e, operationName, paramOrd); + } + public static Expression.TypeResolution isExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { if (e instanceof FieldAttribute fa) { if (DataTypes.isString(fa.dataType())) { @@ -45,7 +59,29 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa return Expression.TypeResolution.TYPE_RESOLVED; } + private static final String[] SPATIAL_TYPE_NAMES = new String[] { + GEO_POINT.typeName(), + CARTESIAN_POINT.typeName(), + GEO_SHAPE.typeName(), + CARTESIAN_SHAPE.typeName() }; + private static final String[] POINT_TYPE_NAMES = new String[] { GEO_POINT.typeName(), CARTESIAN_POINT.typeName() }; + private static final String[] NON_SPATIAL_TYPE_NAMES = EsqlDataTypes.types() + .stream() + .filter(EsqlDataTypes::isRepresentable) + .filter(t -> EsqlDataTypes.isSpatial(t) == false) + .map(DataType::esType) + .toArray(String[]::new); + public static Expression.TypeResolution isSpatialPoint(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { - return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, "geo_point or cartesian_point"); + return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, POINT_TYPE_NAMES); } + + public static Expression.TypeResolution isSpatial(Expression e, 
String operationName, TypeResolutions.ParamOrdinal paramOrd) { + return isType(e, EsqlDataTypes::isSpatial, operationName, paramOrd, SPATIAL_TYPE_NAMES); + } + + public static Expression.TypeResolution isNotSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + return isType(e, t -> EsqlDataTypes.isSpatial(t) == false, operationName, paramOrd, NON_SPATIAL_TYPE_NAMES); + } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java index e7f507e3983a7..bf48d1d806e18 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java @@ -15,5 +15,8 @@ */ public interface SurrogateExpression { + /** + * Returns the expression this one should be replaced with, or {@code null} if it cannot be replaced. + */ Expression surrogate(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index fac0121bd05b4..6f1f01bbe632c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; @@ -78,6 +79,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -94,7 +96,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.session.Configuration; @@ -103,7 +105,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import java.util.stream.Collectors; public final class EsqlFunctionRegistry extends FunctionRegistry { @@ -127,7 +128,8 @@ private
FunctionDefinition[][] functions() { def(MedianAbsoluteDeviation.class, MedianAbsoluteDeviation::new, "median_absolute_deviation"), def(Min.class, Min::new, "min"), def(Percentile.class, Percentile::new, "percentile"), - def(Sum.class, Sum::new, "sum") }, + def(Sum.class, Sum::new, "sum"), + def(Values.class, Values::new, "values") }, // math new FunctionDefinition[] { def(Abs.class, Abs::new, "abs"), @@ -178,9 +180,11 @@ private FunctionDefinition[][] functions() { def(DateTrunc.class, DateTrunc::new, "date_trunc"), def(Now.class, Now::new, "now") }, // spatial - new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid") }, - new FunctionDefinition[] { def(StX.class, StX::new, "st_x") }, - new FunctionDefinition[] { def(StY.class, StY::new, "st_y") }, + new FunctionDefinition[] { + def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), + def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), + def(StX.class, StX::new, "st_x"), + def(StY.class, StY::new, "st_y") }, // conditional new FunctionDefinition[] { def(Case.class, Case::new, "case") }, // null @@ -243,7 +247,7 @@ public record FunctionDescription( ) { public String fullSignature() { StringBuilder builder = new StringBuilder(); - builder.append(ShowFunctions.withPipes(returnType)); + builder.append(MetaFunctions.withPipes(returnType)); builder.append(" "); builder.append(name); builder.append("("); @@ -260,16 +264,25 @@ public String fullSignature() { builder.append("..."); } builder.append(":"); - builder.append(ShowFunctions.withPipes(arg.type())); + builder.append(MetaFunctions.withPipes(arg.type())); } builder.append(")"); return builder.toString(); } + /** + * The name of every argument. + */ public List argNames() { - return args.stream().map(ArgSignature::name).collect(Collectors.toList()); + return args.stream().map(ArgSignature::name).toList(); } + /** + * The description of every argument. + */ + public List argDescriptions() { + return args.stream().map(ArgSignature::description).toList(); + } } public static FunctionDescription description(FunctionDefinition def) { @@ -278,7 +291,7 @@ public static FunctionDescription description(FunctionDefinition def) { return new FunctionDescription(def.name(), List.of(), null, null, false, false); } Constructor constructor = constructors[0]; - FunctionInfo functionInfo = constructor.getAnnotation(FunctionInfo.class); + FunctionInfo functionInfo = functionInfo(def); String functionDescription = functionInfo == null ? "" : functionInfo.description().replace('\n', ' '); String[] returnType = functionInfo == null ? new String[] { "?" 
} : functionInfo.returnType(); var params = constructor.getParameters(); // no multiple c'tors supported @@ -301,4 +314,12 @@ public static FunctionDescription description(FunctionDefinition def) { return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); } + public static FunctionInfo functionInfo(FunctionDefinition def) { + var constructors = def.clazz().getConstructors(); + if (constructors.length == 0) { + return null; + } + Constructor constructor = constructors[0]; + return constructor.getAnnotation(FunctionInfo.class); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java new file mode 100644 index 0000000000000..0cee9d2c53cde --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An example of using a function that is rendered in the docs. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.CONSTRUCTOR) +public @interface Example { + /** + * The test file that contains the example. + */ + String file(); + + /** + * The tag that fences this example. + */ + String tag(); +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index cd2e710498e5e..5c3c9423f32ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -18,9 +18,29 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface FunctionInfo { + /** + * The type(s) this function returns. + */ String[] returnType(); + /** + * The description of the function rendered in {@code META FUNCTIONS} + * and the docs. + */ String description() default ""; + /** + * A {@code NOTE} that's added after the {@link #description} in the docs. + */ + String note() default ""; + + /** + * Is this an aggregation (true) or a scalar function (false). + */ boolean isAggregation() default false; + + /** + * Examples of using this function that are rendered in the docs. 
+ */ + Example[] examples() default {}; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 784d97f820428..3ea0721d52c00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -26,7 +27,7 @@ public class Avg extends AggregateFunction implements SurrogateExpression { @FunctionInfo(returnType = "double", description = "The average of a numeric field.", isAggregation = true) - public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Avg(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @@ -60,6 +61,7 @@ public Avg replaceChildren(List newChildren) { public Expression surrogate() { var s = source(); var field = field(); - return new Div(s, new Sum(s, field), new Count(s, field), dataType()); + + return field().foldable() ? new MvAvg(s, field) : new Div(s, new Sum(s, field), new Count(s, field), dataType()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index d8ec5300c061f..3e8030322caa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -11,8 +11,10 @@ import org.elasticsearch.compute.aggregation.MaxDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,10 +22,10 @@ import java.util.List; -public class Max extends NumericAggregate { +public class Max extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The maximum value of a numeric field.", isAggregation = true) - public Max(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Max(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @@ -61,4 +63,9 @@ 
protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new MaxDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + return field().foldable() ? new MvMax(source(), field()) : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index a6f4e30a62459..7612510f056f2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -31,7 +31,7 @@ public class Median extends AggregateFunction implements SurrogateExpression { description = "The value that is greater than half of all values and less than half of all values.", isAggregation = true ) - public Median(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Median(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index ecf1a47ee9eb3..db7979ef0359c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -27,7 +27,7 @@ public class MedianAbsoluteDeviation extends NumericAggregate { description = "The median absolute deviation, a measure of variability.", isAggregation = true ) - public MedianAbsoluteDeviation(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public MedianAbsoluteDeviation(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 8fdce6d959b98..c69d2f4a1fc2d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -11,8 +11,10 @@ import org.elasticsearch.compute.aggregation.MinDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,10 +22,10 @@ import java.util.List; -public class Min extends 
NumericAggregate { +public class Min extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The minimum value of a numeric field.", isAggregation = true) - public Min(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Min(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @@ -61,4 +63,9 @@ protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new MinDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + return field().foldable() ? new MvMin(source(), field()) : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 96385d534edcd..22592f067ba99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -36,7 +36,7 @@ public class Percentile extends NumericAggregate { ) public Percentile( Source source, - @Param(name = "field", type = { "double", "integer", "long" }) Expression field, + @Param(name = "number", type = { "double", "integer", "long" }) Expression field, @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile ) { super(source, field, List.of(percentile)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index d09762947a597..805724bfcd16c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -10,12 +10,18 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.List; @@ -26,10 +32,10 @@ /** * Sum all values of a field in matching documents. 
*/ -public class Sum extends NumericAggregate { +public class Sum extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) - public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } @@ -63,4 +69,15 @@ protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new SumDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + var s = source(); + var field = field(); + + // SUM(const) is equivalent to MV_SUM(const)*COUNT(*). + return field.foldable() + ? new Mul(s, new MvSum(s, field), new Count(s, new Literal(s, StringUtils.WILDCARD, DataTypes.KEYWORD))) + : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java new file mode 100644 index 0000000000000..d0d614a665794 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; + +public class Values extends AggregateFunction implements ToAggregator { + @FunctionInfo( + returnType = { "boolean|date|double|integer|ip|keyword|long|text|version" }, + description = "Collect values for a field.", + isAggregation = true + ) + public Values( + Source source, + @Param(name = "field", type = { "boolean|date|double|integer|ip|keyword|long|text|version" }) Expression v + ) { + super(source, v); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Values::new, 
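Sum's new surrogate above relies on the identity SUM(const) == MV_SUM(const) * COUNT(*): a constant contributes the same per-row sum on every matching row. A quick numeric check of that identity in plain Java, with no ESQL types involved:

```java
// Numeric check of the SUM(const) == MV_SUM(const) * COUNT(*) identity used in
// Sum#surrogate() above.
import java.util.List;
import java.util.stream.LongStream;

public class SumSurrogateCheck {
    public static void main(String[] args) {
        List<Long> constant = List.of(1L, 2L);   // a multivalued constant, e.g. SUM([1, 2])
        long rows = 3;                           // COUNT(*)

        // Left side: summing the constant once per matching row.
        long direct = LongStream.range(0, rows)
            .map(r -> constant.stream().mapToLong(Long::longValue).sum())
            .sum();

        // Right side: MV_SUM(const) * COUNT(*).
        long surrogate = constant.stream().mapToLong(Long::longValue).sum() * rows;

        System.out.println(direct + " == " + surrogate); // 9 == 9
    }
}
```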
field()); + } + + @Override + public Values replaceChildren(List newChildren) { + return new Values(source(), newChildren.get(0)); + } + + @Override + public DataType dataType() { + return field().dataType(); + } + + @Override + protected TypeResolution resolveType() { + return EsqlTypeResolutions.isNotSpatial(field(), sourceText(), DEFAULT); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + DataType type = field().dataType(); + if (type == DataTypes.INTEGER) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.LONG) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.DOUBLE) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + if (DataTypes.isString(type) || type == DataTypes.IP || type == DataTypes.VERSION) { + return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.BOOLEAN) { + return new ValuesBooleanAggregatorFunctionSupplier(inputChannels); + } + // TODO cartesian_point, geo_point + throw EsqlIllegalArgumentException.illegalDataType(type); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 66756ffa14c60..f00e69ddaabe4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -66,7 +66,7 @@ public Case( Source source, @Param(name = "condition", type = { "boolean" }) Expression first, @Param( - name = "rest", + name = "trueValue", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 9e89746c4a9dd..1794258402aed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -43,7 +43,8 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { public Greatest( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) List rest + @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< + Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index fa0e393ed52b9..6b4208f7b3d85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -43,7 +43,8 @@ public class 
Least extends EsqlScalarFunction implements OptionalArgument { public Least( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) List rest + @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< + Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 388ab970205ad..cd9fcb0390937 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -16,10 +16,11 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import java.math.BigInteger; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToBoolean; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBoolean; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -27,7 +28,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToBoolean extends AbstractConvertFunction { @@ -44,7 +44,7 @@ public class ToBoolean extends AbstractConvertFunction { @FunctionInfo(returnType = "boolean", description = "Converts an input value to a boolean value.") public ToBoolean( Source source, - @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param(name = "field", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field ) { super(source, field); } @@ -71,7 +71,7 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString") static boolean fromKeyword(BytesRef keyword) { - return Boolean.parseBoolean(keyword.utf8ToString()); + return stringToBoolean(keyword.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble") @@ -86,8 +86,7 @@ static boolean fromLong(long l) { @ConvertEvaluator(extraName = "FromUnsignedLong") static boolean fromUnsignedLong(long ul) { - Number n = unsignedLongAsNumber(ul); - return n instanceof BigInteger || n.longValue() != 0; + return unsignedLongToBoolean(ul); } @ConvertEvaluator(extraName = "FromInt") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 3756c322abc4e..4ef666b03dfb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; public class ToCartesianPoint extends AbstractConvertFunction { @@ -33,7 +33,7 @@ public class ToCartesianPoint extends AbstractConvertFunction { ); @FunctionInfo(returnType = "cartesian_point", description = "Converts an input value to a point value.") - public ToCartesianPoint(Source source, @Param(name = "v", type = { "cartesian_point", "keyword", "text" }) Expression field) { + public ToCartesianPoint(Source source, @Param(name = "field", type = { "cartesian_point", "keyword", "text" }) Expression field) { super(source, field); } @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java index 51294b7834fc4..6bd57a1dd2641 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -19,11 +19,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; public class ToCartesianShape extends AbstractConvertFunction { @@ -37,7 +37,7 @@ public class ToCartesianShape extends AbstractConvertFunction { @FunctionInfo(returnType = "cartesian_shape", description = "Converts an input value to a shape value.") public ToCartesianShape( Source source, - @Param(name = "v", type = { "cartesian_point", "cartesian_shape", "keyword", "text" }) Expression field + @Param(name = "field", type = { "cartesian_point", "cartesian_shape", "keyword", "text" }) Expression field ) { super(source, field); } @@ -64,6 +64,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 1ff8bc39e36f4..a1dae4a06b0bd 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -11,7 +11,6 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,6 +19,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -43,7 +43,7 @@ public class ToDatetime extends AbstractConvertFunction { @FunctionInfo(returnType = "date", description = "Converts an input value to a date value.") public ToDatetime( Source source, - @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param(name = "field", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field ) { super(source, field); } @@ -70,6 +70,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static long fromKeyword(BytesRef in) { - return DateParse.process(in, DateParse.DEFAULT_FORMATTER); + return dateTimeToLong(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index c5e7b473f4e56..8332841518f01 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -41,7 +41,7 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ); @FunctionInfo(returnType = "double", description = "Converts a number in radians to degrees.") - public ToDegrees(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public ToDegrees(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 6a984abdad50f..005f27abc2a56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import 
org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,6 +20,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToDouble; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -27,7 +30,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToDouble extends AbstractConvertFunction { @@ -45,7 +47,10 @@ public class ToDouble extends AbstractConvertFunction { @FunctionInfo(returnType = "double", description = "Converts an input value to a double value.") public ToDouble( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } @@ -75,14 +80,14 @@ static double fromBoolean(boolean bool) { return bool ? 1d : 0d; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static double fromKeyword(BytesRef in) { - return Double.parseDouble(in.utf8ToString()); + return stringToDouble(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromUnsignedLong") static double fromUnsignedLong(long l) { - return unsignedLongAsNumber(l).doubleValue(); + return unsignedLongToDouble(l); } @ConvertEvaluator(extraName = "FromLong") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index 16ea1235ccf59..96e366be25e44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToGeoPoint extends AbstractConvertFunction { @@ -33,7 +33,7 @@ public class ToGeoPoint extends AbstractConvertFunction { ); @FunctionInfo(returnType = "geo_point", description = "Converts an input value to a geo_point value.") - public ToGeoPoint(Source source, @Param(name = "v", type = { "geo_point", "keyword", "text" }) Expression field) { + public ToGeoPoint(Source source, @Param(name = "field", type = { "geo_point", "keyword", "text" }) 
Expression field) { super(source, field); } @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java index 3a5d5f2012df6..d8381547b1651 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -19,11 +19,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToGeoShape extends AbstractConvertFunction { @@ -35,7 +35,7 @@ public class ToGeoShape extends AbstractConvertFunction { ); @FunctionInfo(returnType = "geo_shape", description = "Converts an input value to a geo_shape value.") - public ToGeoShape(Source source, @Param(name = "v", type = { "geo_point", "geo_shape", "keyword", "text" }) Expression field) { + public ToGeoShape(Source source, @Param(name = "field", type = { "geo_point", "geo_shape", "keyword", "text" }) Expression field) { super(source, field); } @@ -61,6 +61,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index fc6a5f5c69afa..acb9ef7b46d63 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; public class ToIP extends AbstractConvertFunction { @@ -33,7 +33,7 @@ public class ToIP extends AbstractConvertFunction { ); @FunctionInfo(returnType = "ip", description = "Converts an input string to an IP value.") - public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { + public ToIP(Source source, @Param(name = "field", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } @@ -59,6 +59,6 @@ protected NodeInfo info() { 
@ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef asString) { - return parseIP(asString.utf8ToString()); + return stringToIP(asString); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 2288ddcc33a55..32d83de8da846 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -20,6 +20,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToInt; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -29,7 +31,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToInteger extends AbstractConvertFunction { @@ -47,7 +48,10 @@ public class ToInteger extends AbstractConvertFunction { @FunctionInfo(returnType = "integer", description = "Converts an input value to an integer value.") public ToInteger( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } @@ -77,18 +81,9 @@ static int fromBoolean(boolean bool) { return bool ? 
1 : 0; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static int fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - try { - return Integer.parseInt(asString); - } catch (NumberFormatException nfe) { - try { - return fromDouble(Double.parseDouble(asString)); - } catch (Exception e) { - throw nfe; - } - } + return stringToInt(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) @@ -98,12 +93,7 @@ static int fromDouble(double dbl) { @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) static int fromUnsignedLong(long ul) { - Number n = unsignedLongAsNumber(ul); - int i = n.intValue(); - if (i != n.longValue()) { - throw new InvalidArgumentException("[{}] out of [integer] range", n); - } - return i; + return unsignedLongToInt(ul); } @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index ee7658b07b7f9..b8dea5d8b42ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -20,8 +20,9 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -30,7 +31,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToLong extends AbstractConvertFunction { @@ -48,7 +48,10 @@ public class ToLong extends AbstractConvertFunction { @FunctionInfo(returnType = "long", description = "Converts an input value to a long value.") public ToLong( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } @@ -78,18 +81,9 @@ static long fromBoolean(boolean bool) { return bool ? 
1L : 0L; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static long fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - try { - return Long.parseLong(asString); - } catch (NumberFormatException nfe) { - try { - return fromDouble(Double.parseDouble(asString)); - } catch (Exception e) { - throw nfe; - } - } + return stringToLong(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) @@ -99,7 +93,7 @@ static long fromDouble(double dbl) { @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) static long fromUnsignedLong(long ul) { - return safeToLong(unsignedLongAsNumber(ul)); + return unsignedLongToLong(ul); } @ConvertEvaluator(extraName = "FromInt") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index ac31cf3759ad9..0fe8795060c78 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -40,7 +40,7 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ); @FunctionInfo(returnType = "double", description = "Converts a number in degrees to radians.") - public ToRadians(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public ToRadians(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 688996dd1db00..a15d610f2b517 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -17,11 +16,16 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.versionfield.Version; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.numericBooleanToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToString; +import static 
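The ToInteger and ToLong changes above replace inline parsing (try the integral parse, then fall back to parsing the string as a double) with shared EsqlDataTypeConverter helpers that surface InvalidArgumentException consistently. A rough sketch of that parse-with-fallback behavior follows; the rounding, range check and error messages are illustrative assumptions, not the real helper:

```java
// Illustration of the behavior the diff centralizes as stringToInt: integral
// parse first, double fallback second, one consistent exception type.
public class StringToIntSketch {
    static int stringToInt(String s) {
        try {
            return Integer.parseInt(s);
        } catch (NumberFormatException nfe) {
            try {
                double d = Double.parseDouble(s);
                long rounded = Math.round(d);
                if (rounded < Integer.MIN_VALUE || rounded > Integer.MAX_VALUE) {
                    throw new IllegalArgumentException("[" + s + "] out of [integer] range");
                }
                return (int) rounded;
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("cannot parse [" + s + "] as an integer", nfe);
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(stringToInt("42"));   // 42
        System.out.println(stringToInt("42.7")); // 43
    }
}
```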
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; @@ -36,10 +40,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToString extends AbstractConvertFunction implements EvaluatorMapper { @@ -64,7 +64,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper public ToString( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", @@ -107,61 +107,61 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromBoolean") static BytesRef fromBoolean(boolean bool) { - return new BytesRef(String.valueOf(bool)); + return numericBooleanToString(bool); } @ConvertEvaluator(extraName = "FromIP") static BytesRef fromIP(BytesRef ip) { - return new BytesRef(DocValueFormat.IP.format(ip)); + return new BytesRef(ipToString(ip)); } @ConvertEvaluator(extraName = "FromDatetime") static BytesRef fromDatetime(long datetime) { - return new BytesRef(UTC_DATE_TIME_FORMATTER.formatMillis(datetime)); + return new BytesRef(dateTimeToString(datetime)); } @ConvertEvaluator(extraName = "FromDouble") static BytesRef fromDouble(double dbl) { - return new BytesRef(String.valueOf(dbl)); + return numericBooleanToString(dbl); } @ConvertEvaluator(extraName = "FromLong") static BytesRef fromDouble(long lng) { - return new BytesRef(String.valueOf(lng)); + return numericBooleanToString(lng); } @ConvertEvaluator(extraName = "FromInt") static BytesRef fromDouble(int integer) { - return new BytesRef(String.valueOf(integer)); + return numericBooleanToString(integer); } @ConvertEvaluator(extraName = "FromVersion") static BytesRef fromVersion(BytesRef version) { - return new BytesRef(new Version(version).toString()); + return new BytesRef(versionToString(version)); } @ConvertEvaluator(extraName = "FromUnsignedLong") static BytesRef fromUnsignedLong(long lng) { - return new BytesRef(unsignedLongAsNumber(lng).toString()); + return unsignedLongToString(lng); } @ConvertEvaluator(extraName = "FromGeoPoint") static BytesRef fromGeoPoint(BytesRef wkb) { - return new BytesRef(GEO.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromCartesianPoint") static BytesRef fromCartesianPoint(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromCartesianShape") static BytesRef fromCartesianShape(BytesRef wkb) { - return new BytesRef(GEO.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromGeoShape") static BytesRef fromGeoShape(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 656d99ee8ab80..8127fd2103051 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -20,7 +20,11 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.booleanToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.doubleToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -29,9 +33,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; public class ToUnsignedLong extends AbstractConvertFunction { @@ -49,7 +50,10 @@ public class ToUnsignedLong extends AbstractConvertFunction { @FunctionInfo(returnType = "unsigned_long", description = "Converts an input value to an unsigned long value.") public ToUnsignedLong( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } @@ -76,27 +80,26 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromBoolean") static long fromBoolean(boolean bool) { - return bool ? 
ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; + return booleanToUnsignedLong(bool); } @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static long fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - return asLongUnsigned(safeToUnsignedLong(asString)); + return stringToUnsignedLong(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static long fromDouble(double dbl) { - return asLongUnsigned(safeToUnsignedLong(dbl)); + return doubleToUnsignedLong(dbl); } @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static long fromLong(long lng) { - return asLongUnsigned(safeToUnsignedLong(lng)); + return longToUnsignedLong(lng, false); } @ConvertEvaluator(extraName = "FromInt", warnExceptions = { InvalidArgumentException.class }) static long fromInt(int i) { - return fromLong(i); + return intToUnsignedLong(i); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index e196a91e3bac2..b0e0b385ee3c4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -15,11 +15,11 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.versionfield.Version; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; @@ -33,7 +33,7 @@ public class ToVersion extends AbstractConvertFunction { ); @FunctionInfo(returnType = "version", description = "Converts an input string to a version value.") - public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { + public ToVersion(Source source, @Param(name = "field", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString") static BytesRef fromKeyword(BytesRef asString) { - return new Version(asString.utf8ToString()).toBytesRef(); + return stringToVersion(asString); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 4ee178852fcd4..544f284791919 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import 
org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -24,15 +24,15 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.time.Instant; import java.time.ZoneId; import java.time.temporal.ChronoField; import java.util.List; -import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateExtract extends EsqlConfigurationFunction { @@ -43,14 +43,14 @@ public DateExtract( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial - @Param(name = "date_part", type = { "keyword" }, description = """ + @Param(name = "datePart", type = { "keyword", "text" }, description = """ Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.""") Expression chronoFieldExp, - @Param(name = "field", type = "date", description = "Date expression") Expression field, + @Param(name = "date", type = "date", description = "Date expression") Expression field, Configuration configuration ) { super(source, List.of(chronoFieldExp, field), configuration); @@ -76,13 +76,12 @@ private ChronoField chronoField() { // TODO: move the slimmed down code here to toEvaluator? 
if (chronoField == null) { Expression field = children().get(0); - if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { - try { - BytesRef br = BytesRefs.toBytesRef(field.fold()); - chronoField = ChronoField.valueOf(br.utf8ToString().toUpperCase(Locale.ROOT)); - } catch (Exception e) { - return null; + try { + if (field.foldable() && EsqlDataTypes.isString(field.dataType())) { + chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); } + } catch (Exception e) { + return null; } } return chronoField; @@ -90,13 +89,12 @@ private ChronoField chronoField() { @Evaluator(warnExceptions = { IllegalArgumentException.class }) static long process(long value, BytesRef chronoField, @Fixed ZoneId zone) { - ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); - return Instant.ofEpochMilli(value).atZone(zone).getLong(chrono); + return chronoToLong(value, chronoField, zone); } @Evaluator(extraName = "Constant") static long process(long value, @Fixed ChronoField chronoField, @Fixed ZoneId zone) { - return Instant.ofEpochMilli(value).atZone(zone).getLong(chronoField); + return chronoToLong(value, chronoField, zone); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 60e1aabed3cdd..6a6e523f81974 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.session.Configuration; @@ -28,11 +29,12 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { @@ -42,7 +44,7 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") public DateFormat( Source source, - @Param(optional = true, name = "format", type = { "keyword" }, description = "A valid date pattern") Expression format, + @Param(optional = true, name = "dateFormat", type = { "keyword", "text" }, description = "A valid date pattern") 
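DateExtract above now folds a constant date-part string through the shared STRING_TO_CHRONO_FIELD converter and delegates the actual extraction to EsqlDataTypeConverter.chronoToLong. The underlying java.time behavior looks roughly like this; it is a sketch of the idea, not the real helper:

```java
// Case-insensitive ChronoField lookup plus extraction in a given zone,
// mirroring what the DateExtract evaluators delegate to above.
import java.time.Instant;
import java.time.ZoneId;
import java.time.temporal.ChronoField;
import java.util.Locale;

public class ChronoExtractSketch {
    static long chronoToLong(long epochMillis, String chronoFieldName, ZoneId zone) {
        ChronoField field = ChronoField.valueOf(chronoFieldName.toUpperCase(Locale.ROOT));
        return Instant.ofEpochMilli(epochMillis).atZone(zone).getLong(field);
    }

    public static void main(String[] args) {
        long millis = 1_700_000_000_000L; // 2023-11-14T22:13:20Z
        System.out.println(chronoToLong(millis, "year", ZoneId.of("UTC")));        // 2023
        System.out.println(chronoToLong(millis, "hour_of_day", ZoneId.of("UTC"))); // 22
    }
}
```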
Expression format, @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, Configuration configuration ) { @@ -83,39 +85,33 @@ public boolean foldable() { @Evaluator(extraName = "Constant") static BytesRef process(long val, @Fixed DateFormatter formatter) { - return new BytesRef(formatter.formatMillis(val)); + return new BytesRef(dateTimeToString(val, formatter)); } @Evaluator static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { - return process(val, toFormatter(formatter, locale)); + return new BytesRef(dateTimeToString(val, toFormatter(formatter, locale))); } @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); } - if (format.dataType() != DataTypes.KEYWORD) { + if (EsqlDataTypes.isString(format.dataType()) == false) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, formatter); } var formatEvaluator = toEvaluator.apply(format); - return dvrCtx -> new DateFormatEvaluator( - source(), - fieldEvaluator.get(dvrCtx), - formatEvaluator.get(dvrCtx), - ((EsqlConfiguration) configuration()).locale(), - dvrCtx - ); + return new DateFormatEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, ((EsqlConfiguration) configuration()).locale()); } private static DateFormatter toFormatter(Object format, Locale locale) { - DateFormatter result = format == null ? UTC_DATE_TIME_FORMATTER : DateFormatter.forPattern(((BytesRef) format).utf8ToString()); + DateFormatter result = format == null ? 
DEFAULT_DATE_TIME_FORMATTER : DateFormatter.forPattern(((BytesRef) format).utf8ToString()); return result.withLocale(locale); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 2c5a83b74eb83..b356dbccbeb4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -28,22 +29,23 @@ import java.util.function.Function; import static org.elasticsearch.common.time.DateFormatter.forPattern; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC; public class DateParse extends EsqlScalarFunction implements OptionalArgument { - public static final DateFormatter DEFAULT_FORMATTER = toFormatter(new BytesRef("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"), UTC); private final Expression field; private final Expression format; @FunctionInfo(returnType = "date", description = "Parses a string into a date value") public DateParse( Source source, - @Param(name = "datePattern", type = { "keyword" }, description = "A valid date pattern", optional = true) Expression first, + @Param(name = "datePattern", type = { "keyword", "text" }, description = "A valid date pattern", optional = true) Expression first, @Param(name = "dateString", type = { "keyword", "text" }, description = "A string representing a date") Expression second ) { super(source, second != null ? 
List.of(first, second) : List.of(first)); @@ -83,13 +85,12 @@ public boolean foldable() { @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class }) public static long process(BytesRef val, @Fixed DateFormatter formatter) throws IllegalArgumentException { - String dateString = val.utf8ToString(); - return formatter.parseMillis(dateString); + return dateTimeToLong(val.utf8ToString(), formatter); } @Evaluator(warnExceptions = { IllegalArgumentException.class }) static long process(BytesRef val, BytesRef formatter, @Fixed ZoneId zoneId) throws IllegalArgumentException { - return process(val, toFormatter(formatter, zoneId)); + return dateTimeToLong(val.utf8ToString(), toFormatter(formatter, zoneId)); } @Override @@ -97,9 +98,9 @@ public ExpressionEvaluator.Factory toEvaluator(Function matches + @Param(name = "blockX", type = { "keyword", "text" }, description = "CIDR block to test the IP against.") List matches ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; @@ -76,11 +76,10 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); - return dvrCtx -> new CIDRMatchEvaluator( + return new CIDRMatchEvaluator.Factory( source(), - ipEvaluatorSupplier.get(dvrCtx), - matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx + ipEvaluatorSupplier, + matches.stream().map(x -> toEvaluator.apply(x)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index d2f93abb27234..3b66543f4bfd0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -22,8 +23,19 @@ import java.util.function.Function; public class Abs extends UnaryScalarFunction { - @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Returns the absolute value.") - public Abs(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Returns the absolute value.", + examples = { @Example(file = "math", tag = "abs"), @Example(file = "math", tag = "abs-employees") } + ) + public Abs( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." 
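The DateFormat and CIDRMatch toEvaluator changes above swap ad-hoc `dvrCtx -> new ...Evaluator(...)` lambdas for generated `...Evaluator.Factory` objects, so per-driver construction lives in one named place instead of a closure. A stripped-down sketch of that pattern; DriverContext, Evaluator and UpperCaseEvaluator are simplified placeholders, not the real compute-engine classes:

```java
// Simplified illustration of moving from "closure over a context" to an
// explicit Factory object.
record DriverContext(String name) {}

interface Evaluator {
    String eval(String input);
}

interface EvaluatorFactory {
    Evaluator get(DriverContext ctx);
}

class UpperCaseEvaluator implements Evaluator {
    UpperCaseEvaluator(DriverContext ctx) {
        // a real evaluator would allocate per-driver resources from ctx here
    }

    @Override
    public String eval(String input) {
        return input.toUpperCase(java.util.Locale.ROOT);
    }

    // The "Factory" style: construction logic is a named object instead of an
    // inline lambda repeated in every toEvaluator() implementation.
    static class Factory implements EvaluatorFactory {
        @Override
        public Evaluator get(DriverContext ctx) {
            return new UpperCaseEvaluator(ctx);
        }
    }
}

public class FactorySketch {
    public static void main(String[] args) {
        EvaluatorFactory lambdaStyle = ctx -> new UpperCaseEvaluator(ctx); // old style
        EvaluatorFactory factoryStyle = new UpperCaseEvaluator.Factory();  // new style

        DriverContext ctx = new DriverContext("driver-1");
        System.out.println(lambdaStyle.get(ctx).eval("esql"));  // ESQL
        System.out.println(factoryStyle.get(ctx).eval("esql")); // ESQL
    }
}
```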
+ ) Expression n + ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index d2e0e8f025665..e4982fa69826f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,10 +22,18 @@ * Inverse cosine trigonometric function. */ public class Acos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "The arccosine of an angle, expressed in radians.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians.", + examples = { @Example(file = "floats", tag = "acos") } + ) public Acos( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "Number between -1 and 1") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Number between -1 and 1. If `null`, the function returns `null`." + ) Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 38b70cea0350c..c1c1e72633d6a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,10 +22,19 @@ * Inverse cosine trigonometric function. */ public class Asin extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Inverse sine trigonometric function.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input\n" + + "numeric expression as an angle, expressed in radians.", + examples = @Example(file = "floats", tag = "asin") + ) public Asin( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "Number between -1 and 1") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Number between -1 and 1. If `null`, the function returns `null`." 
+ ) Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 071379820922a..6cd3d4b9ffb65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,10 +22,19 @@ * Inverse cosine trigonometric function. */ public class Atan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Inverse tangent trigonometric function.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input\n" + + "numeric expression as an angle, expressed in radians.", + examples = @Example(file = "floats", tag = "atan") + ) public Atan( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "A number") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index b69a536c2df84..47a17a90d2d7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -34,12 +35,22 @@ public class Atan2 extends EsqlScalarFunction { @FunctionInfo( returnType = "double", - description = "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." + description = "The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the\n" + + "origin to the point (x , y) in the Cartesian plane, expressed in radians.", + examples = @Example(file = "floats", tag = "atan2") ) public Atan2( Source source, - @Param(name = "y", type = { "double", "integer", "long", "unsigned_long" }, description = "y coordinate") Expression y, - @Param(name = "x", type = { "double", "integer", "long", "unsigned_long" }, description = "x coordinate") Expression x + @Param( + name = "y_coordinate", + type = { "double", "integer", "long", "unsigned_long" }, + description = "y coordinate. If `null`, the function returns `null`." 
+ ) Expression y, + @Param( + name = "x_coordinate", + type = { "double", "integer", "long", "unsigned_long" }, + description = "x coordinate. If `null`, the function returns `null`." + ) Expression x ) { super(source, List.of(y, x)); this.y = y; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 635bee26370e2..b9aeff7f1d935 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -7,11 +7,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -35,6 +34,7 @@ import java.util.function.Function; import static org.elasticsearch.xpack.esql.expression.Validations.isFoldable; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FOURTH; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; @@ -213,9 +213,7 @@ public void validate(Failures failures) { private long foldToLong(Expression e) { Object value = Foldables.valueOf(e); - return DataTypes.isDateTime(e.dataType()) - ? ((Number) value).longValue() - : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(BytesRefs.toString(value)); + return DataTypes.isDateTime(e.dataType()) ? 
((Number) value).longValue() : dateTimeToLong(((BytesRef) value).utf8ToString()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java index 1c9f42de2f640..60bb904ab4849 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongToDouble; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; public class Cast { /** @@ -84,15 +85,12 @@ static double castUnsignedLongToDouble(long v) { @Evaluator(extraName = "IntToUnsignedLong") static long castIntToUnsignedLong(int v) { - return castLongToUnsignedLong(v); + return intToUnsignedLong(v); } @Evaluator(extraName = "LongToUnsignedLong") // TODO: catch-to-null in evaluator? static long castLongToUnsignedLong(long v) { - if (v < 0) { - throw new InvalidArgumentException("[" + v + "] out of [unsigned_long] range"); - } - return v; + return longToUnsignedLong(v, false); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 36021ba2bfe3d..3ab9b1fc2cb1a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -30,8 +31,21 @@ *

*/ public class Ceil extends UnaryScalarFunction { - @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number up to the nearest integer.") - public Ceil(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Round a number up to the nearest integer.", + note = "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to " + + "the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil].", + examples = @Example(file = "math", tag = "ceil") + ) + public Ceil( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n + ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 0ecc0381636ef..e928f1ae2713e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -24,7 +24,7 @@ public class Cos extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") public Cos( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 78d982acc7bb6..55250a3ac720f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -25,7 +25,7 @@ public class Cosh extends AbstractTrigonometricFunction { public Cosh( Source source, @Param( - name = "n", + name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "The number who's hyperbolic cosine is to be returned" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index 9e5efaa611dc9..d4d71bb59ec77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -34,7 +34,7 @@ public class Floor extends UnaryScalarFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number down to the nearest integer." 
) - public Floor(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Floor(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index 1994d0a1efc3d..f434437e12d5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -32,11 +32,11 @@ public class Log extends EsqlScalarFunction implements OptionalArgument { private final Expression base, value; - @FunctionInfo(returnType = "double", description = "Returns the logarithm of a value to a base.") + @FunctionInfo(returnType = "double", description = "Returns the logarithm of a number to a base.") public Log( Source source, @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }, optional = true) Expression base, - @Param(name = "value", type = { "integer", "unsigned_long", "long", "double" }) Expression value + @Param(name = "number", type = { "integer", "unsigned_long", "long", "double" }) Expression value ) { super(source, value != null ? Arrays.asList(base, value) : Arrays.asList(base)); this.value = value != null ? value : base; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 1e8df6a07696f..46df37c685cf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -23,12 +23,13 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Log10 extends UnaryScalarFunction { @FunctionInfo(returnType = "double", description = "Returns the log base 10.") - public Log10(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Log10(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } @@ -74,7 +75,7 @@ static double processUnsignedLong(long val) { if (val == NumericUtils.ZERO_AS_UNSIGNED_LONG) { throw new ArithmeticException("Log of non-positive number"); } - return Math.log10(NumericUtils.unsignedLongToDouble(val)); + return Math.log10(unsignedLongToDouble(val)); } @Evaluator(extraName = "Int", warnExceptions = ArithmeticException.class) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index bdaa2cd4fc433..2edb1c7c3a159 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -29,12 +29,12 @@ import java.util.function.BiFunction; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asUnsignedLong; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class Round extends EsqlScalarFunction implements OptionalArgument { @@ -47,7 +47,7 @@ public class Round extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo(returnType = "double", description = "Rounds a number to the closest number with the specified number of digits.") public Round( Source source, - @Param(name = "value", type = "double", description = "The numeric value to round") Expression field, + @Param(name = "number", type = "double", description = "The numeric value to round") Expression field, @Param( optional = true, name = "decimals", @@ -99,10 +99,9 @@ static long processUnsignedLong(long val, long decimals) { Number ul = unsignedLongAsNumber(val); if (ul instanceof BigInteger bi) { BigInteger rounded = Maths.round(bi, decimals); - BigInteger unsignedLong = asUnsignedLong(rounded); - return asLongUnsigned(unsignedLong); + return bigIntegerToUnsignedLong(rounded); } else { - return asLongUnsigned(Maths.round(ul.longValue(), decimals)); + return longToUnsignedLong(Maths.round(ul.longValue(), decimals), false); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index b3f204cfc09c8..d8b36a3d38856 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -25,7 +25,7 @@ public class Sin extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle") public Sin( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 25221043f297d..0c46002f56af6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -25,7 +25,7 @@ public class Sinh extends AbstractTrigonometricFunction { 
public Sinh( Source source, @Param( - name = "n", + name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic sine of" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 35620ff926d41..17882f1baa81d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -18,17 +18,17 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.NumericUtils; import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Sqrt extends UnaryScalarFunction { @FunctionInfo(returnType = "double", description = "Returns the square root of a number.") - public Sqrt(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Sqrt(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } @@ -71,7 +71,7 @@ static double process(long val) { @Evaluator(extraName = "UnsignedLong") static double processUnsignedLong(long val) { - return Math.sqrt(NumericUtils.unsignedLongToDouble(val)); + return Math.sqrt(unsignedLongToDouble(val)); } @Evaluator(extraName = "Int", warnExceptions = ArithmeticException.class) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 528a0ae0a0e71..002de2ddfc277 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -24,7 +24,7 @@ public class Tan extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") public Tan( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index c77bbaedf91b8..932677ef0b26d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -25,7 +25,7 @@ public class Tanh extends AbstractTrigonometricFunction { public Tanh( Source source, @Param( - name = "n", + name = "number", 
type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic tangent of" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 5e9a4e2a75878..5265d5bcad660 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -23,9 +23,9 @@ import java.util.List; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongToDouble; /** * Reduce a multivalued field to a single valued field containing the average value. @@ -35,7 +35,7 @@ public class MvAvg extends AbstractMultivalueFunction { returnType = "double", description = "Converts a multivalued field into a single valued field containing the average of all of the values." ) - public MvAvg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvAvg(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index 5df0ac03206c4..d88c3fb1c0759 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -39,7 +39,7 @@ public class MvConcat extends BinaryScalarFunction implements EvaluatorMapper { ) public MvConcat( Source source, - @Param(name = "v", type = { "text", "keyword" }, description = "values to join") Expression field, + @Param(name = "string", type = { "text", "keyword" }, description = "values to join") Expression field, @Param(name = "delim", type = { "text", "keyword" }, description = "delimiter") Expression delim ) { super(source, field, delim); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 4fa89e66982e4..625e0a120372b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -35,7 +35,7 @@ public class MvCount extends AbstractMultivalueFunction { public MvCount( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java 
index dc5fa0036f789..38b5ab187565a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -32,7 +32,10 @@ public class MvDedupe extends AbstractMultivalueFunction { ) public MvDedupe( Source source, - @Param(name = "v", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 0f6bd847d68ed..1a6efd2924903 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -54,7 +54,7 @@ public class MvFirst extends AbstractMultivalueFunction { public MvFirst( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 2881854d17f6f..dff0108e465cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -54,7 +54,7 @@ public class MvLast extends AbstractMultivalueFunction { public MvLast( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index 0b08b99ca0687..b19888f94c6b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -36,7 +36,7 @@ public class MvMax extends AbstractMultivalueFunction { public MvMax( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression v ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index 66a8ec13b4475..8f65d15134cfa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -26,10 +26,10 @@ import java.util.Arrays; import java.util.List; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBigInteger; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; /** * Reduce a multivalued field to a single valued field containing the average value. @@ -39,7 +39,7 @@ public class MvMedian extends AbstractMultivalueFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Converts a multivalued field into a single valued field containing the median value." ) - public MvMedian(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvMedian(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } @@ -156,9 +156,9 @@ static long finishUnsignedLong(Longs longs) { Arrays.sort(longs.values, 0, longs.count); int middle = longs.count / 2; longs.count = 0; - BigInteger a = unsignedLongAsBigInteger(longs.values[middle - 1]); - BigInteger b = unsignedLongAsBigInteger(longs.values[middle]); - return asLongUnsigned(a.add(b).shiftRight(1).longValue()); + BigInteger a = unsignedLongToBigInteger(longs.values[middle - 1]); + BigInteger b = unsignedLongToBigInteger(longs.values[middle]); + return bigIntegerToUnsignedLong(a.add(b).shiftRight(1)); } /** @@ -169,9 +169,9 @@ static long ascendingUnsignedLong(LongBlock values, int firstValue, int count) { if (count % 2 == 1) { return values.getLong(middle); } - BigInteger a = unsignedLongAsBigInteger(values.getLong(middle - 1)); - BigInteger b = unsignedLongAsBigInteger(values.getLong(middle)); - return asLongUnsigned(a.add(b).shiftRight(1).longValue()); + BigInteger a = unsignedLongToBigInteger(values.getLong(middle - 1)); + BigInteger b = unsignedLongToBigInteger(values.getLong(middle)); + return bigIntegerToUnsignedLong(a.add(b).shiftRight(1)); } static class Ints { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 91a48e539042f..45eb038616b09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -36,7 +36,7 @@ public class MvMin extends AbstractMultivalueFunction { public MvMin( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression field ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index b7868b33102a3..e054fc7e00e24 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ 
-35,6 +35,7 @@ import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; @@ -67,7 +68,7 @@ public class MvSlice extends ScalarFunction implements OptionalArgument, Evaluat public MvSlice( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", @@ -129,8 +130,8 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator( Function toEvaluator ) { if (start.foldable() && end.foldable()) { - int startOffset = Integer.parseInt(String.valueOf(start.fold())); - int endOffset = Integer.parseInt(String.valueOf(end.fold())); + int startOffset = stringToInt(String.valueOf(start.fold())); + int endOffset = stringToInt(String.valueOf(end.fold())); checkStartEnd(startOffset, endOffset); } return switch (PlannerUtils.toElementType(field.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 96d7ad905c8d0..ebe23d0d79e7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -34,7 +34,7 @@ public class MvSum extends AbstractMultivalueFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Converts a multivalued field into a single valued field containing the sum of all of the values." ) - public MvSum(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvSum(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java index 6227efeced36e..88e006b1dfd8d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java @@ -48,8 +48,8 @@ public class MvZip extends ScalarFunction implements OptionalArgument, Evaluator ) public MvZip( Source source, - @Param(name = "mvLeft", type = { "keyword", "text" }, description = "A multivalued field") Expression mvLeft, - @Param(name = "mvRight", type = { "keyword", "text" }, description = "A multivalued field") Expression mvRight, + @Param(name = "string1", type = { "keyword", "text" }, description = "A multivalued field") Expression mvLeft, + @Param(name = "string2", type = { "keyword", "text" }, description = "A multivalued field") Expression mvRight, @Param(name = "delim", type = { "keyword", "text" }, description = "delimiter", optional = true) Expression delim ) { super(source, delim == null ? 
Arrays.asList(mvLeft, mvRight, COMMA) : Arrays.asList(mvLeft, mvRight, delim)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index d4166f8dea5a2..98dc0c7e83d93 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -43,19 +44,21 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo( returnType = { "boolean", "text", "integer", "keyword", "long" }, - description = "Returns the first of its arguments that is not null." + description = "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", + examples = { @Example(file = "null", tag = "coalesce") } ) public Coalesce( Source source, @Param( - name = "expression", + name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, description = "Expression to evaluate" ) Expression first, @Param( - name = "expressionX", + name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate" + description = "Other expression to evaluate", + optional = true ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index c0caaf8b180ce..f30425158b1b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -73,7 +73,7 @@ * *
  • * Add your function to {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}. - * This links it into the language and {@code SHOW FUNCTIONS}. Also add your function to + * This links it into the language and {@code META FUNCTIONS}. Also add your function to * {@link org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes}. This makes your function * serializable over the wire. Mostly you can copy existing implementations for both. *
  • diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java new file mode 100644 index 0000000000000..ccdd68e1806c1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; + +/** + * SpatialRelatesFunction classes, like SpatialIntersects, support various combinations of incoming types, which can be sourced from + * constant literals (foldable), or from the index, which could provide either source values or doc-values. This class is used to + * create the appropriate evaluator for the given combination of types. + * @param + * @param + */ +abstract class SpatialEvaluatorFactory { + protected final TriFunction factoryCreator; + + SpatialEvaluatorFactory(TriFunction factoryCreator) { + this.factoryCreator = factoryCreator; + } + + public abstract EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier function, + Function toEvaluator + ); + + public static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator( + SpatialSourceSupplier s, + Map> evaluatorRules, + Function toEvaluator + ) { + var evaluatorKey = new SpatialEvaluatorKey( + s.crsType(), + s.leftDocValues(), + s.rightDocValues(), + fieldKey(s.left()), + fieldKey(s.right()) + ); + SpatialEvaluatorFactory factory = evaluatorRules.get(evaluatorKey); + if (factory == null) { + evaluatorKey = evaluatorKey.swapSides(); + factory = evaluatorRules.get(evaluatorKey); + if (factory == null) { + throw evaluatorKey.unsupported(); + } + return factory.get(new SwappedSpatialSourceSupplier(s), toEvaluator); + } + return factory.get(s, toEvaluator); + } + + protected static SpatialEvaluatorFieldKey fieldKey(Expression expression) { + return new SpatialEvaluatorFieldKey(expression.dataType(), expression.foldable()); + } + + /** + * This interface defines a supplier of the key information needed by the spatial evaluator factories. + * The SpatialRelatesFunction will use this to supply the necessary information to the factories. + * When we need to swap left and right sides around, we can use a SwappableSpatialSourceSupplier. 
+ */ + interface SpatialSourceSupplier { + Source source(); + + Expression left(); + + Expression right(); + + SpatialRelatesFunction.SpatialCrsType crsType(); + + boolean leftDocValues(); + + boolean rightDocValues(); + } + + protected static class SwappedSpatialSourceSupplier implements SpatialSourceSupplier { + private final SpatialSourceSupplier delegate; + + public SwappedSpatialSourceSupplier(SpatialSourceSupplier delegate) { + this.delegate = delegate; + } + + @Override + public Source source() { + return delegate.source(); + } + + @Override + public SpatialRelatesFunction.SpatialCrsType crsType() { + return delegate.crsType(); + } + + @Override + public boolean leftDocValues() { + return delegate.leftDocValues(); + } + + @Override + public boolean rightDocValues() { + return delegate.rightDocValues(); + } + + @Override + public Expression left() { + return delegate.right(); + } + + @Override + public Expression right() { + return delegate.left(); + } + } + + protected static class SpatialEvaluatorFactoryWithFields extends SpatialEvaluatorFactory< + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory> { + SpatialEvaluatorFactoryWithFields( + TriFunction< + Source, + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory> factoryCreator + ) { + super(factoryCreator); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier s, + Function toEvaluator + ) { + return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), toEvaluator.apply(s.right())); + } + } + + protected static class SpatialEvaluatorWithConstantFactory extends SpatialEvaluatorFactory< + EvalOperator.ExpressionEvaluator.Factory, + Component2D> { + + SpatialEvaluatorWithConstantFactory( + TriFunction< + Source, + EvalOperator.ExpressionEvaluator.Factory, + Component2D, + EvalOperator.ExpressionEvaluator.Factory> factoryCreator + ) { + super(factoryCreator); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier s, + Function toEvaluator + ) { + return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2D(s.crsType(), s.right())); + } + } + + protected record SpatialEvaluatorFieldKey(DataType dataType, boolean isConstant) {} + + protected record SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType crsType, + boolean leftDocValues, + boolean rightDocValues, + SpatialEvaluatorFieldKey left, + SpatialEvaluatorFieldKey right + ) { + SpatialEvaluatorKey(SpatialRelatesFunction.SpatialCrsType crsType, SpatialEvaluatorFieldKey left, SpatialEvaluatorFieldKey right) { + this(crsType, false, false, left, right); + } + + SpatialEvaluatorKey withLeftDocValues() { + return new SpatialEvaluatorKey(crsType, true, false, left, right); + } + + SpatialEvaluatorKey swapSides() { + return new SpatialEvaluatorKey(crsType, rightDocValues, leftDocValues, right, left); + } + + static SpatialEvaluatorKey fromSourceAndConstant(DataType left, DataType right) { + return new SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType.fromDataType(left), + new SpatialEvaluatorFieldKey(left, false), + new SpatialEvaluatorFieldKey(right, true) + ); + } + + static SpatialEvaluatorKey fromSources(DataType left, DataType right) { + return new SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType.fromDataType(left), + new SpatialEvaluatorFieldKey(left, false), + new SpatialEvaluatorFieldKey(right, false) + ); + } + + 
UnsupportedOperationException unsupported() { + return new UnsupportedOperationException("Unsupported spatial relation combination: " + this); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java new file mode 100644 index 0000000000000..831c041caaa94 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -0,0 +1,226 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +public class SpatialIntersects extends SpatialRelatesFunction { + protected static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.INTERSECTS, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Intersects") + ); + protected static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.INTERSECTS, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Intersects") + ); + + @FunctionInfo(returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.") + public SpatialIntersects( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or 
variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + private SpatialIntersects(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.INTERSECTS; + } + + @Override + public SpatialIntersects withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialIntersects(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialIntersects replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialIntersects(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialIntersects::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + protected Map> evaluatorRules() { + return evaluatorMap; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialIntersectsGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + 
SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java new file mode 100644 index 0000000000000..cdd21682d0db7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.Component2DVisitor; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.apache.lucene.document.ShapeField.QueryRelation.DISJOINT; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.ql.type.DataTypes.isNull; + +public abstract class SpatialRelatesFunction extends BinaryScalarFunction + implements + EvaluatorMapper, + SpatialEvaluatorFactory.SpatialSourceSupplier { + protected SpatialCrsType crsType; + protected final boolean leftDocValues; + protected final boolean rightDocValues; + + protected SpatialRelatesFunction(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right); + this.leftDocValues = leftDocValues; + this.rightDocValues = rightDocValues; + } + + public abstract 
ShapeField.QueryRelation queryRelation(); + + @Override + public DataType dataType() { + return DataTypes.BOOLEAN; + } + + @Override + public SpatialCrsType crsType() { + if (crsType == null) { + resolveType(); + } + return crsType; + } + + @Override + protected TypeResolution resolveType() { + if (left().foldable() && right().foldable() == false || isNull(left().dataType())) { + // Left is literal, but right is not, check the left field's type against the right field + return resolveType(right(), left(), SECOND, FIRST); + } else { + // All other cases check the right against the left + return resolveType(left(), right(), FIRST, SECOND); + } + } + + private TypeResolution resolveType( + Expression leftExpression, + Expression rightExpression, + TypeResolutions.ParamOrdinal leftOrdinal, + TypeResolutions.ParamOrdinal rightOrdinal + ) { + TypeResolution leftResolution = isSpatial(leftExpression, sourceText(), leftOrdinal); + TypeResolution rightResolution = isSpatial(rightExpression, sourceText(), rightOrdinal); + if (leftResolution.resolved()) { + return resolveType(leftExpression, rightExpression, rightOrdinal); + } else if (rightResolution.resolved()) { + return resolveType(rightExpression, leftExpression, leftOrdinal); + } else { + return leftResolution; + } + } + + protected TypeResolution resolveType( + Expression spatialExpression, + Expression otherExpression, + TypeResolutions.ParamOrdinal otherParamOrdinal + ) { + if (isNull(spatialExpression.dataType())) { + return isSpatial(otherExpression, sourceText(), otherParamOrdinal); + } + TypeResolution resolution = isSameSpatialType(spatialExpression.dataType(), otherExpression, sourceText(), otherParamOrdinal); + if (resolution.unresolved()) { + return resolution; + } + crsType = SpatialCrsType.fromDataType(spatialExpression.dataType()); + return TypeResolution.TYPE_RESOLVED; + } + + public static TypeResolution isSameSpatialType( + DataType spatialDataType, + Expression expression, + String operationName, + TypeResolutions.ParamOrdinal paramOrd + ) { + return isType( + expression, + dt -> EsqlDataTypes.isSpatial(dt) && spatialCRSCompatible(spatialDataType, dt), + operationName, + paramOrd, + compatibleTypeNames(spatialDataType) + ); + } + + private static final String[] GEO_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; + private static final String[] CARTESIAN_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; + + private static boolean spatialCRSCompatible(DataType spatialDataType, DataType otherDataType) { + return EsqlDataTypes.isSpatialGeo(spatialDataType) && EsqlDataTypes.isSpatialGeo(otherDataType) + || EsqlDataTypes.isSpatialGeo(spatialDataType) == false && EsqlDataTypes.isSpatialGeo(otherDataType) == false; + } + + static String[] compatibleTypeNames(DataType spatialDataType) { + return EsqlDataTypes.isSpatialGeo(spatialDataType) ? GEO_TYPE_NAMES : CARTESIAN_TYPE_NAMES; + } + + @Override + public boolean foldable() { + return left().foldable() && right().foldable(); + } + + /** + * Mark the function as expecting the specified fields to arrive as doc-values. + */ + public abstract SpatialRelatesFunction withDocValues(Set attributes); + + /** + * Push-down to Lucene is only possible if one field is an indexed spatial field, and the other is a constant spatial or string column. 
+ */ + public boolean canPushToSource(Predicate isAggregatable) { + // The use of foldable here instead of SpatialEvaluatorFieldKey.isConstant is intentional to match the behavior of the + // Lucene pushdown code in EsqlTranslationHandler::SpatialRelatesTranslator + // We could enhance both places to support ReferenceAttributes that refer to constants, but that is a larger change + return isPushableFieldAttribute(left(), isAggregatable) && right().foldable() + || isPushableFieldAttribute(right(), isAggregatable) && left().foldable(); + } + + private static boolean isPushableFieldAttribute(Expression exp, Predicate isAggregatable) { + return exp instanceof FieldAttribute fa + && fa.getExactInfo().hasExact() + && isAggregatable.test(fa) + && EsqlDataTypes.isSpatial(fa.dataType()); + } + + @Override + public int hashCode() { + // NB: the hashcode is currently used for key generation so + // to avoid clashes between aggs with the same arguments, add the class name as variation + return Objects.hash(getClass(), children(), leftDocValues, rightDocValues); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + SpatialRelatesFunction other = (SpatialRelatesFunction) obj; + return Objects.equals(other.children(), children()) + && Objects.equals(other.leftDocValues, leftDocValues) + && Objects.equals(other.rightDocValues, rightDocValues); + } + return false; + } + + public boolean leftDocValues() { + return leftDocValues; + } + + public boolean rightDocValues() { + return rightDocValues; + } + + /** + * Produce a map of rules defining combinations of incoming types to the evaluator factory that should be used. + */ + protected abstract Map> evaluatorRules(); + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return SpatialEvaluatorFactory.makeSpatialEvaluator(this, evaluatorRules(), toEvaluator); + } + + /** + * When performing local physical plan optimization, it is necessary to know if this function has a field attribute. + * This is because the planner might push down a spatial aggregation to lucene, which results in the field being provided + * as doc-values instead of source values, and this function needs to know if it should use doc-values or not. + */ + public boolean hasFieldAttribute(Set foundAttributes) { + return foundField(left(), foundAttributes) || foundField(right(), foundAttributes); + } + + protected boolean foundField(Expression expression, Set foundAttributes) { + return expression instanceof FieldAttribute field && foundAttributes.contains(field); + } + + protected enum SpatialCrsType { + GEO, + CARTESIAN, + UNSPECIFIED; + + public static SpatialCrsType fromDataType(DataType dataType) { + return EsqlDataTypes.isSpatialGeo(dataType) ? SpatialCrsType.GEO + : EsqlDataTypes.isSpatial(dataType) ? 
SpatialCrsType.CARTESIAN + : SpatialCrsType.UNSPECIFIED; + } + } + + protected static class SpatialRelations { + protected final ShapeField.QueryRelation queryRelation; + protected final SpatialCoordinateTypes spatialCoordinateType; + protected final CoordinateEncoder coordinateEncoder; + protected final ShapeIndexer shapeIndexer; + protected final SpatialCrsType crsType; + + protected SpatialRelations( + ShapeField.QueryRelation queryRelation, + SpatialCoordinateTypes spatialCoordinateType, + CoordinateEncoder encoder, + ShapeIndexer shapeIndexer + ) { + this.queryRelation = queryRelation; + this.spatialCoordinateType = spatialCoordinateType; + this.coordinateEncoder = encoder; + this.shapeIndexer = shapeIndexer; + this.crsType = spatialCoordinateType.equals(SpatialCoordinateTypes.GEO) ? SpatialCrsType.GEO : SpatialCrsType.CARTESIAN; + } + + protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws IOException { + Component2D rightComponent2D = asLuceneComponent2D(crsType, fromBytesRef(right)); + return geometryRelatesGeometry(left, rightComponent2D); + } + + private Geometry fromBytesRef(BytesRef bytesRef) { + return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); + } + + protected boolean geometryRelatesGeometry(BytesRef left, Component2D rightComponent2D) throws IOException { + Geometry leftGeom = fromBytesRef(left); + // We already have a Component2D for the right geometry, so we need to convert the left geometry to a doc-values byte array + return geometryRelatesGeometry(asGeometryDocValueReader(coordinateEncoder, shapeIndexer, leftGeom), rightComponent2D); + } + + protected boolean geometryRelatesGeometry(GeometryDocValueReader reader, Component2D rightComponent2D) throws IOException { + var visitor = Component2DVisitor.getVisitor(rightComponent2D, queryRelation, coordinateEncoder); + reader.visit(visitor); + return visitor.matches(); + } + + protected boolean pointRelatesGeometry(long encoded, Geometry geometry) { + Component2D component2D = asLuceneComponent2D(crsType, geometry); + return pointRelatesGeometry(encoded, component2D); + } + + protected boolean pointRelatesGeometry(long encoded, Component2D component2D) { + // This code path exists for doc-values points, and we could consider re-using the point class to reduce garbage creation + Point point = spatialCoordinateType.longAsPoint(encoded); + return geometryRelatesPoint(component2D, point); + } + + private boolean geometryRelatesPoint(Component2D component2D, Point point) { + boolean contains = component2D.contains(point.getX(), point.getY()); + return queryRelation == DISJOINT ? contains == false : contains; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java new file mode 100644 index 0000000000000..e088dbf7a70ec --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; + +import static org.elasticsearch.xpack.ql.planner.ExpressionTranslators.valueOf; + +public class SpatialRelatesUtils { + + /** + * This function is used to convert a spatial constant to a lucene Component2D. + * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. + */ + static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crsType, Expression expression) { + return asLuceneComponent2D(crsType, makeGeometryFromLiteral(expression)); + } + + static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crsType, Geometry geometry) { + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + var luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, t -> {}); + return LatLonGeometry.create(luceneGeometries); + } else { + var luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + return XYGeometry.create(luceneGeometries); + } + } + + /** + * This function is used to convert a spatial constant to a doc-values byte array. + * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. 
+ */ + static GeometryDocValueReader asGeometryDocValueReader(SpatialRelatesFunction.SpatialCrsType crsType, Expression expression) + throws IOException { + Geometry geometry = makeGeometryFromLiteral(expression); + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + return asGeometryDocValueReader( + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "SpatialRelatesFunction"), + geometry + ); + } else { + return asGeometryDocValueReader(CoordinateEncoder.CARTESIAN, new CartesianShapeIndexer("SpatialRelatesFunction"), geometry); + } + + } + + /** + * Converting shapes into doc-values byte arrays is needed under two situations: + * - If both left and right are constants, we convert the right to Component2D and the left to doc-values for comparison + * - If the right is a constant and no lucene push-down was possible, we get WKB in the left and convert it to doc-values for comparison + */ + static GeometryDocValueReader asGeometryDocValueReader(CoordinateEncoder encoder, ShapeIndexer shapeIndexer, Geometry geometry) + throws IOException { + GeometryDocValueReader reader = new GeometryDocValueReader(); + CentroidCalculator centroidCalculator = new CentroidCalculator(); + if (geometry instanceof Circle) { + // Both the centroid calculator and the shape indexer do not support circles + throw new IllegalArgumentException(ShapeType.CIRCLE + " geometry is not supported"); + } + centroidCalculator.add(geometry); + reader.reset(GeometryDocValueWriter.write(shapeIndexer.indexShape(geometry), encoder, centroidCalculator)); + return reader; + } + + /** + * This function is used in two places, when evaluating a spatial constant in the SpatialRelatesFunction, as well as when + * we do lucene-pushdown of spatial functions. + */ + public static Geometry makeGeometryFromLiteral(Expression expr) { + Object value = valueOf(expr); + + if (value instanceof BytesRef bytesRef) { + return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); + } else { + throw new IllegalArgumentException( + "Unsupported combination of literal [" + value.getClass().getSimpleName() + "] of type [" + expr.dataType() + "]" + ); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java new file mode 100644 index 0000000000000..7dac02e50ddbc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.Transition; +import org.apache.lucene.util.automaton.UTF32ToUTF8; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * Matches {@link BytesRef}s against {@link Automaton automata}. + */ +public class AutomataMatch { + /** + * Build an {@link EvalOperator.ExpressionEvaluator.Factory} that will match + * {@link BytesRef}s against {@link Automaton automata} and return a {@link BooleanBlock}. + */ + public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Source source, + EvalOperator.ExpressionEvaluator.Factory field, + Automaton utf32Automaton + ) { + /* + * ByteRunAutomaton has a way to convert utf32 to utf8, but if we used it + * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. + */ + Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); + } + + @Evaluator + static boolean process(BytesRef input, @Fixed(includeInToString = false) ByteRunAutomaton automaton, @Fixed String pattern) { + if (input == null) { + return false; + } + return automaton.run(input.bytes, input.offset, input.length); + } + + private static final int MAX_LENGTH = 1024 * 64; + + /** + * Convert an {@link Automaton} to dot. + *
    + * This was borrowed from {@link Automaton#toDot} but has been modified to snip if the length + * grows too much and to format the bytes differently. + *
    + */ + public static String toDot(Automaton automaton) { + StringBuilder b = new StringBuilder(); + b.append("digraph Automaton {\n"); + b.append(" rankdir = LR\n"); + b.append(" node [width=0.2, height=0.2, fontsize=8]\n"); + int numStates = automaton.getNumStates(); + if (numStates > 0) { + b.append(" initial [shape=plaintext,label=\"\"]\n"); + b.append(" initial -> 0\n"); + } + + Transition t = new Transition(); + + too_big: for (int state = 0; state < numStates; ++state) { + b.append(" "); + b.append(state); + if (automaton.isAccept(state)) { + b.append(" [shape=doublecircle,label=\"").append(state).append("\"]\n"); + } else { + b.append(" [shape=circle,label=\"").append(state).append("\"]\n"); + } + + int numTransitions = automaton.initTransition(state, t); + + for (int i = 0; i < numTransitions; ++i) { + automaton.getNextTransition(t); + + assert t.max >= t.min; + + b.append(" "); + b.append(state); + b.append(" -> "); + b.append(t.dest); + b.append(" [label=\""); + appendByte(t.min, b); + if (t.max != t.min) { + b.append('-'); + appendByte(t.max, b); + } + + b.append("\"]\n"); + if (b.length() >= MAX_LENGTH) { + b.append("...snip..."); + break too_big; + } + } + } + + b.append('}'); + return b.toString(); + } + + static void appendByte(int c, StringBuilder b) { + if (c > 255) { + throw new UnsupportedOperationException("can only format bytes but got [" + c + "]"); + } + if (c == 34) { + b.append("\\\""); + return; + } + if (c == 92) { + b.append("\\\\"); + return; + } + if (c >= 33 && c <= 126) { + b.appendCodePoint(c); + return; + } + b.append("0x"); + String hex = Integer.toHexString(c); + switch (hex.length()) { + case 1 -> b.append('0').append(hex); + case 2 -> b.append(hex); + default -> throw new UnsupportedOperationException("can only format bytes"); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 9cf38d3f1d960..a9f44eece2411 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -41,8 +41,8 @@ public class Concat extends EsqlScalarFunction { @FunctionInfo(returnType = "keyword", description = "Concatenates two or more strings.") public Concat( Source source, - @Param(name = "first", type = { "keyword", "text" }) Expression first, - @Param(name = "rest", type = { "keyword", "text" }) List rest + @Param(name = "string1", type = { "keyword", "text" }) Expression first, + @Param(name = "string2", type = { "keyword", "text" }) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index 7582f0cd361f6..8185cbd7eb85e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -29,7 +29,7 @@ */ public class LTrim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes leading whitespaces from a string.") - public LTrim(Source 
source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public LTrim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 229bb665a6772..af3f25ee85408 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -43,12 +44,13 @@ public class Left extends EsqlScalarFunction { @FunctionInfo( returnType = "keyword", - description = "Return the substring that extracts length chars from the string starting from the left." + description = "Returns the substring that extracts 'length' chars from 'string' starting from the left.", + examples = { @Example(file = "string", tag = "left") } ) public Left( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, - @Param(name = "length", type = { "integer" }) Expression length + @Param(name = "string", type = { "keyword", "text" }, description = "The string from which to return a substring.") Expression str, + @Param(name = "length", type = { "integer" }, description = "The number of characters to return.") Expression length ) { super(source, Arrays.asList(str, length)); this.source = source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 8dc89a458e3be..759e6db436eda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -29,7 +29,7 @@ public class Length extends UnaryScalarFunction { @FunctionInfo(returnType = "integer", description = "Returns the character length of a string.") - public Length(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression field) { + public Length(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java similarity index 72% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index 510c9b7098926..3fe4b92ca8f25 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -5,17 +5,21 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.function.Function; + import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -public class RLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.RLike { +public class RLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.RLike implements EvaluatorMapper { public RLike(Source source, Expression value, RLikePattern pattern) { super(source, value, pattern); } @@ -38,4 +42,11 @@ protected RLike replaceChild(Expression newChild) { protected TypeResolution resolveType() { return isString(field(), sourceText(), DEFAULT); } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return AutomataMatch.toEvaluator(source(), toEvaluator.apply(field()), pattern().createAutomaton()); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 857d2765bcfe4..ce93f88f71990 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -29,7 +29,7 @@ */ public class RTrim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes trailing whitespaces from a string.") - public RTrim(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public RTrim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index ed368068694e2..0ed12abcc0a83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -43,9 +43,9 @@ public class Replace extends EsqlScalarFunction { ) public Replace( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "regex", type = { "keyword", "text" }) Expression regex, - @Param(name = "newStr", type = { "keyword", "text" }) Expression newStr + @Param(name = "newString", type = { "keyword", "text" }) Expression newStr ) { super(source, 
Arrays.asList(str, regex, newStr)); this.str = str; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 0e3e78e8188ab..13087dbd32916 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -47,7 +47,7 @@ public class Right extends EsqlScalarFunction { ) public Right( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 66d50aa4df061..611fc9947d3db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -25,10 +25,9 @@ import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; /** * Splits a string on some delimiter into a multivalued string field. 
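
A small sketch, not part of the diff, of the behaviour the Split Javadoc above describes: one single-valued string becoming a multivalued result. The real evaluator operates on BytesRef values inside the compute engine; this plain-Java analogue, its SplitSketch class name, and the sample values are illustrative only.

import java.util.List;
import java.util.regex.Pattern;

class SplitSketch {
    // Conceptual analogue of SPLIT("foo;bar;baz", ";") producing the multivalue ["foo", "bar", "baz"].
    static List<String> split(String value, String delim) {
        return List.of(value.split(Pattern.quote(delim)));
    }

    public static void main(String[] args) {
        System.out.println(split("foo;bar;baz", ";"));  // prints: [foo, bar, baz]
    }
}
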
@@ -37,7 +36,7 @@ public class Split extends BinaryScalarFunction implements EvaluatorMapper { @FunctionInfo(returnType = "keyword", description = "Split a single valued string into multiple strings.") public Split( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "delim", type = { "keyword", "text" }) Expression delim ) { super(source, str, delim); @@ -59,7 +58,7 @@ protected TypeResolution resolveType() { return resolution; } - return isString(right(), sourceText(), SECOND); + return isStringAndExact(right(), sourceText(), SECOND); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 36d24fef03006..88187b8ba65bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -41,7 +41,7 @@ public class Substring extends EsqlScalarFunction implements OptionalArgument { ) public Substring( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "start", type = { "integer" }) Expression start, @Param(optional = true, name = "length", type = { "integer" }) Expression length ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index 1efb966bf9ce2..6d71eb9fd4baf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -29,7 +29,7 @@ */ public final class Trim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes leading and trailing whitespaces from a string.") - public Trim(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public Trim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java similarity index 62% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index f7039bb95d1b2..00d5cc7b439fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -5,17 +5,22 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import org.apache.lucene.util.automaton.Automata; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.function.Function; + import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -public class WildcardLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardLike { +public class WildcardLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardLike implements EvaluatorMapper { public WildcardLike(Source source, Expression left, WildcardPattern pattern) { super(source, left, pattern, false); } @@ -34,4 +39,16 @@ protected WildcardLike replaceChild(Expression newLeft) { protected TypeResolution resolveType() { return isString(field(), sourceText(), DEFAULT); } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return AutomataMatch.toEvaluator( + source(), + toEvaluator.apply(field()), + // The empty pattern will accept the empty string + pattern().pattern().length() == 0 ? Automata.makeEmptyString() : pattern().createAutomaton() + ); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index 42fd526cb3b99..170e3de6e4209 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation.OperationSymbol.DIV; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; public class Div extends EsqlArithmeticOperation implements BinaryComparisonInversible { @@ -73,7 +73,7 @@ static long processLongs(long lhs, long rhs) { @Evaluator(extraName = "UnsignedLongs", warnExceptions = { ArithmeticException.class }) static long processUnsignedLongs(long lhs, long rhs) { - return asLongUnsigned(Long.divideUnsigned(asLongUnsigned(lhs), asLongUnsigned(rhs))); + return longToUnsignedLong(Long.divideUnsigned(longToUnsignedLong(lhs, true), longToUnsignedLong(rhs, true)), true); } @Evaluator(extraName = "Doubles") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java index 41a396819a7f2..bc1ad8fcb5f94 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation.OperationSymbol.MOD; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; public class Mod extends EsqlArithmeticOperation { @@ -52,7 +52,7 @@ static long processLongs(long lhs, long rhs) { @Evaluator(extraName = "UnsignedLongs", warnExceptions = { ArithmeticException.class }) static long processUnsignedLongs(long lhs, long rhs) { - return asLongUnsigned(Long.remainderUnsigned(asLongUnsigned(lhs), asLongUnsigned(rhs))); + return longToUnsignedLong(Long.remainderUnsigned(longToUnsignedLong(lhs, true), longToUnsignedLong(rhs, true)), true); } @Evaluator(extraName = "Doubles") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index b20160ac936d6..ab2f9079b610c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.InProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -67,7 +67,7 @@ protected boolean areCompatible(DataType left, DataType right) { @Override protected TypeResolution resolveType() { // TODO: move the foldability check from QL's In to SQL's and remove this method - TypeResolution resolution = TypeResolutions.isExact(value(), functionName(), DEFAULT); + TypeResolution resolution = EsqlTypeResolutions.isExact(value(), functionName(), DEFAULT); if (resolution.unresolved()) { return resolution; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 96a1ce9ed715e..d198d740029e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -26,8 +26,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; @@ -39,6 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; @@ -102,6 +101,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -109,6 +109,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; @@ -118,6 +119,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -207,6 +209,7 @@ import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.Entry.of; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; @@ -387,6 +390,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Pow.class, PlanNamedTypes::writePow, PlanNamedTypes::readPow), of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), + of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeIntersects, PlanNamedTypes::readIntersects), 
of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), @@ -412,6 +416,7 @@ public static List namedTypeEntries() { of(AggregateFunction.class, Percentile.class, PlanNamedTypes::writePercentile, PlanNamedTypes::readPercentile), of(AggregateFunction.class, SpatialCentroid.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Sum.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), + of(AggregateFunction.class, Values.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), // Multivalue functions of(ScalarFunction.class, MvAvg.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvCount.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), @@ -992,7 +997,7 @@ static void writeFieldAttribute(PlanStreamOutput out, FieldAttribute fileAttribu out.writeNamed(EsField.class, fileAttribute.field()); out.writeOptionalString(fileAttribute.qualifier()); out.writeEnum(fileAttribute.nullable()); - out.writeLong(Long.parseLong(fileAttribute.id().toString())); + out.writeLong(stringToLong(fileAttribute.id().toString())); out.writeBoolean(fileAttribute.synthetic()); } @@ -1014,7 +1019,7 @@ static void writeReferenceAttr(PlanStreamOutput out, ReferenceAttribute referenc out.writeString(referenceAttribute.dataType().typeName()); out.writeOptionalString(referenceAttribute.qualifier()); out.writeEnum(referenceAttribute.nullable()); - out.writeLong(Long.parseLong(referenceAttribute.id().toString())); + out.writeLong(stringToLong(referenceAttribute.id().toString())); out.writeBoolean(referenceAttribute.synthetic()); } @@ -1037,7 +1042,7 @@ static void writeMetadataAttr(PlanStreamOutput out, MetadataAttribute metadataAt out.writeString(metadataAttribute.dataType().typeName()); out.writeOptionalString(metadataAttribute.qualifier()); out.writeEnum(metadataAttribute.nullable()); - out.writeLong(Long.parseLong(metadataAttribute.id().toString())); + out.writeLong(stringToLong(metadataAttribute.id().toString())); out.writeBoolean(metadataAttribute.synthetic()); out.writeBoolean(metadataAttribute.searchable()); } @@ -1057,7 +1062,7 @@ static void writeUnsupportedAttr(PlanStreamOutput out, UnsupportedAttribute unsu out.writeString(unsupportedAttribute.name()); writeUnsupportedEsField(out, unsupportedAttribute.field()); out.writeOptionalString(unsupportedAttribute.hasCustomMessage() ? 
unsupportedAttribute.unresolvedMessage() : null); - out.writeLong(Long.parseLong(unsupportedAttribute.id().toString())); + out.writeLong(stringToLong(unsupportedAttribute.id().toString())); } // -- EsFields @@ -1470,6 +1475,17 @@ static void writeDateTrunc(PlanStreamOutput out, DateTrunc dateTrunc) throws IOE out.writeExpression(fields.get(1)); } + static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { + return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static void writeIntersects(PlanStreamOutput out, SpatialIntersects intersects) throws IOException { + List fields = intersects.children(); + assert fields.size() == 2; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + } + static Now readNow(PlanStreamInput in) throws IOException { return new Now(in.readSource(), in.configuration()); } @@ -1657,7 +1673,8 @@ static void writeArithmeticOperation(PlanStreamOutput out, ArithmeticOperation a entry(name(Max.class), Max::new), entry(name(Median.class), Median::new), entry(name(MedianAbsoluteDeviation.class), MedianAbsoluteDeviation::new), - entry(name(SpatialCentroid.class), SpatialCentroid::new) + entry(name(SpatialCentroid.class), SpatialCentroid::new), + entry(name(Values.class), Values::new) ); static AggregateFunction readAggFunction(PlanStreamInput in, String name) throws IOException { @@ -1719,7 +1736,7 @@ static void writeAlias(PlanStreamOutput out, Alias alias) throws IOException { out.writeString(alias.name()); out.writeOptionalString(alias.qualifier()); out.writeExpression(alias.child()); - out.writeLong(Long.parseLong(alias.id().toString())); + out.writeLong(stringToLong(alias.id().toString())); out.writeBoolean(alias.synthetic()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 546f34d1b474c..50c893f18b15e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules.OptimizerRule; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -24,6 +25,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsTimeseriesQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; @@ -269,6 +271,8 @@ public static boolean canPushToSource(Expression exp, Predicate } else if (exp instanceof CIDRMatch cidrMatch) { return isAttributePushable(cidrMatch.ipField(), cidrMatch, hasIdenticalDelegate) && 
Expressions.foldable(cidrMatch.matches()); + } else if (exp instanceof SpatialRelatesFunction bc) { + return bc.canPushToSource(LocalPhysicalPlanOptimizer::isAggregatable); } return false; } @@ -453,7 +457,7 @@ public static boolean isPushableFieldAttribute(Expression exp, Predicate { @Override protected PhysicalPlan rule(AggregateExec aggregate) { - var foundAttributes = new HashSet(); + var foundAttributes = new HashSet(); PhysicalPlan plan = aggregate.transformDown(UnaryExec.class, exec -> { if (exec instanceof AggregateExec agg) { @@ -461,7 +465,8 @@ protected PhysicalPlan rule(AggregateExec aggregate) { var changedAggregates = false; for (NamedExpression aggExpr : agg.aggregates()) { if (aggExpr instanceof Alias as && as.child() instanceof SpatialAggregateFunction af) { - if (af.field() instanceof FieldAttribute fieldAttribute) { + if (af.field() instanceof FieldAttribute fieldAttribute + && allowedForDocValues(fieldAttribute, agg, foundAttributes)) { // We need to both mark the field to load differently, and change the spatial function to know to use it foundAttributes.add(fieldAttribute); changedAggregates = true; @@ -484,6 +489,36 @@ protected PhysicalPlan rule(AggregateExec aggregate) { ); } } + if (exec instanceof EvalExec evalExec) { + List fields = evalExec.fields(); + List changed = fields.stream() + .map( + f -> (Alias) f.transformDown( + SpatialRelatesFunction.class, + spatialRelatesFunction -> (spatialRelatesFunction.hasFieldAttribute(foundAttributes)) + ? spatialRelatesFunction.withDocValues(foundAttributes) + : spatialRelatesFunction + ) + ) + .toList(); + if (changed.equals(fields) == false) { + exec = new EvalExec(exec.source(), exec.child(), changed); + } + } + if (exec instanceof FilterExec filterExec) { + // Note that ST_CENTROID does not support shapes, but SpatialRelatesFunction does, so when we extend the centroid + // to support shapes, we need to consider loading shape doc-values for both centroid and relates (ST_INTERSECTS) + var condition = filterExec.condition() + .transformDown( + SpatialRelatesFunction.class, + spatialRelatesFunction -> (spatialRelatesFunction.hasFieldAttribute(foundAttributes)) + ? spatialRelatesFunction.withDocValues(foundAttributes) + : spatialRelatesFunction + ); + if (filterExec.condition().equals(condition) == false) { + exec = new FilterExec(filterExec.source(), filterExec.child(), condition); + } + } if (exec instanceof FieldExtractExec fieldExtractExec) { // Tell the field extractor that it should extract the field from doc-values instead of source values var attributesToExtract = fieldExtractExec.attributesToExtract(); @@ -501,5 +536,24 @@ protected PhysicalPlan rule(AggregateExec aggregate) { }); return plan; } + + /** + * This function disallows the use of more than one field for doc-values extraction in the same spatial relation function. + * This is because comparing two doc-values fields is not supported in the current implementation. 
+ */ + private boolean allowedForDocValues(FieldAttribute fieldAttribute, AggregateExec agg, Set foundAttributes) { + var candidateDocValuesAttributes = new HashSet<>(foundAttributes); + candidateDocValuesAttributes.add(fieldAttribute); + var spatialRelatesAttributes = new HashSet(); + agg.forEachExpressionDown(SpatialRelatesFunction.class, relatesFunction -> { + candidateDocValuesAttributes.forEach(candidate -> { + if (relatesFunction.hasFieldAttribute(Set.of(candidate))) { + spatialRelatesAttributes.add(candidate); + } + }); + }); + // Disallow more than one spatial field to be extracted using doc-values (for now) + return spatialRelatesAttributes.size() < 2; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 59f0d46bf618a..3425306863585 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.AttributeSet; +import org.elasticsearch.xpack.ql.expression.EmptyAttribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.ExpressionSet; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -46,7 +47,6 @@ import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules; -import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.ConstantFolding; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.LiteralsOnTheRight; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PruneLiteralsInOrderBy; @@ -69,7 +69,6 @@ import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.StringUtils; -import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -83,7 +82,6 @@ import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; -import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PropagateEquals; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN; @@ -110,6 +108,23 @@ protected List> batches() { return rules(); } + protected static Batch substitutions() { + return new Batch<>( + "Substitutions", + Limiter.ONCE, + // first extract nested aggs top-level - this simplifies the rest of the rules + new ReplaceStatsAggExpressionWithEval(), + // second extract nested aggs inside of them + new ReplaceStatsNestedExpressionWithEval(), + // lastly replace surrogate functions + new SubstituteSurrogates(), + new ReplaceRegexMatch(), + new ReplaceAliasingEvalWithProject(), + new SkipQueryOnEmptyMappings() + // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 + ); + } + protected static Batch operators() { return new 
Batch<>( "Operator Optimization", @@ -127,10 +142,10 @@ protected static Batch operators() { new BooleanSimplification(), new LiteralsOnTheRight(), // needs to occur before BinaryComparison combinations (see class) - new PropagateEquals(), + new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.PropagateEquals(), new PropagateNullable(), - new BooleanFunctionEqualsElimination(), - new CombineDisjunctionsToIn(), + new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination(), + new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.CombineDisjunctionsToIn(), new SimplifyComparisonsArithmetics(EsqlDataTypes::areCompatible), // prune/elimination new PruneFilters(), @@ -153,26 +168,11 @@ protected static Batch cleanup() { } protected static List> rules() { - var substitutions = new Batch<>( - "Substitutions", - Limiter.ONCE, - // first extract nested aggs top-level - this simplifies the rest of the rules - new ReplaceStatsAggExpressionWithEval(), - // second extract nested aggs inside of them - new ReplaceStatsNestedExpressionWithEval(), - // lastly replace surrogate functions - new SubstituteSurrogates(), - new ReplaceRegexMatch(), - new ReplaceAliasingEvalWithProject(), - new SkipQueryOnEmptyMappings() - // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 - ); - var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions, operators(), skip, cleanup(), defaultTopN, label); + return asList(substitutions(), operators(), skip, cleanup(), defaultTopN, label); } // TODO: currently this rule only works for aggregate functions (AVG) @@ -194,8 +194,10 @@ protected LogicalPlan rule(Aggregate aggregate) { // first pass to check existing aggregates (to avoid duplication and alias waste) for (NamedExpression agg : aggs) { - if (Alias.unwrap(agg) instanceof AggregateFunction af && af instanceof SurrogateExpression == false) { - aggFuncToAttr.put(af, agg.toAttribute()); + if (Alias.unwrap(agg) instanceof AggregateFunction af) { + if ((af instanceof SurrogateExpression se && se.surrogate() != null) == false) { + aggFuncToAttr.put(af, agg.toAttribute()); + } } } @@ -203,7 +205,7 @@ protected LogicalPlan rule(Aggregate aggregate) { // 0. check list of surrogate expressions for (NamedExpression agg : aggs) { Expression e = Alias.unwrap(agg); - if (e instanceof SurrogateExpression sf) { + if (e instanceof SurrogateExpression sf && sf.surrogate() != null) { changed = true; Expression s = sf.surrogate(); @@ -243,9 +245,22 @@ protected LogicalPlan rule(Aggregate aggregate) { LogicalPlan plan = aggregate; if (changed) { var source = aggregate.source(); - plan = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), newAggs); + if (newAggs.isEmpty() == false) { + plan = new Aggregate(source, aggregate.child(), aggregate.groupings(), newAggs); + } else { + // All aggs actually have been surrogates for (foldable) expressions, e.g. + // \_Aggregate[[],[AVG([1, 2][INTEGER]) AS s]] + // Replace by a local relation with one row, followed by an eval, e.g. 
+ // \_Eval[[MVAVG([1, 2][INTEGER]) AS s]] + // \_LocalRelation[[{e}#21],[ConstantNullBlock[positions=1]]] + plan = new LocalRelation( + source, + List.of(new EmptyAttribute(source)), + LocalSupplier.of(new Block[] { BlockUtils.constantBlock(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, null, 1) }) + ); + } // 5. force the initial projection in place - if (transientEval.size() > 0) { + if (transientEval.isEmpty() == false) { plan = new Eval(source, plan, transientEval); // project away transient fields and re-enforce the original order using references (not copies) to the original aggs // this works since the replaced aliases have their nameId copied to avoid having to update all references (which has @@ -503,6 +518,8 @@ public LogicalPlan apply(LogicalPlan plan) { plan = plan.transformUp(p -> { // Apply the replacement inside Filter and Eval (which shouldn't make a difference) + // TODO: also allow aggregates once aggs on constants are supported. + // C.f. https://github.com/elastic/elasticsearch/issues/100634 if (p instanceof Filter || p instanceof Eval) { p = p.transformExpressionsOnly(ReferenceAttribute.class, replaceReference); } @@ -1118,28 +1135,6 @@ private static Project pushDownPastProject(UnaryPlan parent) { } } - /** - * Combine disjunctions on the same field into an In expression. - * This rule looks for both simple equalities: - * 1. a == 1 OR a == 2 becomes a IN (1, 2) - * and combinations of In - * 2. a == 1 OR a IN (2) becomes a IN (1, 2) - * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) - * - * This rule does NOT check for type compatibility as that phase has been - * already be verified in the analyzer. - */ - public static class CombineDisjunctionsToIn extends OptimizerRules.CombineDisjunctionsToIn { - - protected In createIn(Expression key, List values, ZoneId zoneId) { - return new In(key.source(), key, values); - } - - protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { - return new Equals(k.source(), k, v.iterator().next(), finalZoneId); - } - } - static class ReplaceLimitAndSortAsTopN extends OptimizerRules.OptimizerRule { @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index bf569ee587dbc..6b62029bd8f45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -22,11 +22,10 @@ private LogicalVerifier() {} /** Verifies the optimized logical plan. 
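 * Runs the attribute dependency check on every node of the optimized plan; any dependency
 * inconsistency found there is surfaced by throwing an {@link IllegalStateException} built from the
 * collected dependency failures, while regular expression-level verification failures are collected
 * and returned to the caller.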
*/ public Failures verify(LogicalPlan plan) { Failures failures = new Failures(); + Failures dependencyFailures = new Failures(); plan.forEachUp(p -> { - // dependency check - // FIXME: re-enable - // DEPENDENCY_CHECK.checkPlan(p, failures); + DEPENDENCY_CHECK.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { p.forEachExpression(ex -> { @@ -37,6 +36,10 @@ public Failures verify(LogicalPlan plan) { } }); + if (dependencyFailures.hasFailures()) { + throw new IllegalStateException(dependencyFailures.toString()); + } + return failures; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index b9018f56e60de..38ac596135abb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -7,6 +7,14 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; @@ -30,13 +38,39 @@ import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.ql.common.Failures; import org.elasticsearch.xpack.ql.expression.AttributeSet; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.function.Function; +import org.elasticsearch.xpack.ql.expression.predicate.Predicates; +import org.elasticsearch.xpack.ql.expression.predicate.Range; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; +import org.elasticsearch.xpack.ql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.plan.QueryPlan; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.CollectionUtils; + +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.ql.common.Failure.fail; +import static 
org.elasticsearch.xpack.ql.expression.Literal.FALSE; +import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; +import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitOr; class OptimizerRules { @@ -64,6 +98,16 @@ protected AttributeSet generates(P p) { } static class LogicalPlanDependencyCheck extends DependencyConsistency { + @Override + protected AttributeSet references(LogicalPlan plan) { + if (plan instanceof Enrich enrich) { + // The enrichFields are NamedExpressions, so we compute their references as well when just calling enrich.references(). + // But they are not actually referring to attributes from the input plan - only the match field does. + return enrich.matchField().references(); + } + return super.references(plan); + } + @Override protected AttributeSet generates(LogicalPlan logicalPlan) { // source-like operators @@ -139,4 +183,435 @@ protected AttributeSet references(PhysicalPlan plan) { return plan.references(); } } + + /** + * Combine disjunctions on the same field into an In expression. + * This rule looks for both simple equalities: + * 1. a == 1 OR a == 2 becomes a IN (1, 2) + * and combinations of In + * 2. a == 1 OR a IN (2) becomes a IN (1, 2) + * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) + * + * This rule does NOT check for type compatibility as that phase has been + * already be verified in the analyzer. + */ + public static class CombineDisjunctionsToIn extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule { + CombineDisjunctionsToIn() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP); + } + + protected In createIn(Expression key, List values, ZoneId zoneId) { + return new In(key.source(), key, values); + } + + protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { + return new Equals(k.source(), k, v.iterator().next(), finalZoneId); + } + + @Override + protected Expression rule(Or or) { + Expression e = or; + // look only at equals and In + List exps = splitOr(e); + + Map> found = new LinkedHashMap<>(); + ZoneId zoneId = null; + List ors = new LinkedList<>(); + + for (Expression exp : exps) { + if (exp instanceof Equals eq) { + // consider only equals against foldables + if (eq.right().foldable()) { + found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); + } else { + ors.add(exp); + } + if (zoneId == null) { + zoneId = eq.zoneId(); + } + } else if (exp instanceof In in) { + found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); + if (zoneId == null) { + zoneId = in.zoneId(); + } + } else { + ors.add(exp); + } + } + + if (found.isEmpty() == false) { + // combine equals alongside the existing ors + final ZoneId finalZoneId = zoneId; + found.forEach( + (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } + ); + + // TODO: this makes a QL `or`, not an ESQL `or` + Expression combineOr = combineOr(ors); + // check the result semantically since the result might different in order + // but be actually the same which can trigger a loop + // e.g. 
a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle + if (e.semanticEquals(combineOr) == false) { + e = combineOr; + } + } + + return e; + } + } + + /** + * This rule must always be placed after {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.LiteralsOnTheRight}, since it looks + * at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. + */ + public static final class BooleanFunctionEqualsElimination extends + org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule { + + BooleanFunctionEqualsElimination() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP); + } + + @Override + protected Expression rule(BinaryComparison bc) { + if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { + // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant + + // TODO: Replace use of QL Not with ESQL Not + if (TRUE.equals(bc.right())) { + return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); + } + if (FALSE.equals(bc.right())) { + return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); + } + } + + return bc; + } + } + + /** + * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. + * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. + * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. + * + * Since this rule can eliminate Ranges and BinaryComparisons, it should be applied before + * {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineBinaryComparisons}. + * + * This rule doesn't perform any promotion of {@link BinaryComparison}s, that is handled by + * {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineBinaryComparisons} on purpose as the resulting Range might be + * foldable (which is picked by the folding rule on the next run). + */ + public static final class PropagateEquals extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule< + BinaryLogic> { + + PropagateEquals() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN); + } + + public Expression rule(BinaryLogic e) { + if (e instanceof And) { + return propagate((And) e); + } else if (e instanceof Or) { + return propagate((Or) e); + } + return e; + } + + // combine conjunction + private static Expression propagate(And and) { + List ranges = new ArrayList<>(); + // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; + // the others go into the general 'exps'. 
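+ // For example, a == 2 AND a > 1 AND b < c splits into equals = [a == 2], inequalities = [a > 1]
+ // and exps = [b < c]; the equality then makes the overlapping a > 1 redundant, so the conjunction
+ // folds back to a == 2 AND b < c.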
+ // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet + List equals = new ArrayList<>(); + List notEquals = new ArrayList<>(); + List inequalities = new ArrayList<>(); + List exps = new ArrayList<>(); + + boolean changed = false; + + for (Expression ex : Predicates.splitAnd(and)) { + if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof Equals || ex instanceof NullEquals) { + BinaryComparison otherEq = (BinaryComparison) ex; + // equals on different values evaluate to FALSE + // ignore date/time fields as equality comparison might actually be a range check + if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) { + for (BinaryComparison eq : equals) { + if (otherEq.left().semanticEquals(eq.left())) { + Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); + if (comp != null) { + // var cannot be equal to two different values at the same time + if (comp != 0) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + } + } + equals.add(otherEq); + } else { + exps.add(otherEq); + } + } else if (ex instanceof GreaterThan + || ex instanceof GreaterThanOrEqual + || ex instanceof LessThan + || ex instanceof LessThanOrEqual) { + BinaryComparison bc = (BinaryComparison) ex; + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals otherNotEq) { + if (otherNotEq.right().foldable()) { + notEquals.add(otherNotEq); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + // check + for (BinaryComparison eq : equals) { + Object eqValue = eq.right().fold(); + + for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { + Range range = iterator.next(); + + if (range.value().semanticEquals(eq.left())) { + // if equals is outside the interval, evaluate the whole expression to FALSE + if (range.lower().foldable()) { + Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); + if (compare != null && ( + // eq outside the lower boundary + compare > 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeLower() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + if (range.upper().foldable()) { + Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); + if (compare != null && ( + // eq outside the upper boundary + compare < 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeUpper() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + + // it's in the range and thus, remove it + iterator.remove(); + changed = true; + } + } + + // evaluate all NotEquals against the Equal + for (Iterator iter = notEquals.iterator(); iter.hasNext();) { + NotEquals neq = iter.next(); + if (eq.left().semanticEquals(neq.left())) { + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } else { // clashing and redundant: a = 1 AND a != 2 + iter.remove(); + changed = true; + } + } + } + } + + // evaluate all inequalities against the Equal + for (Iterator iter = inequalities.iterator(); iter.hasNext();) { + BinaryComparison bc = iter.next(); + if (eq.left().semanticEquals(bc.left())) { + Integer compare = 
BinaryComparison.compare(eqValue, bc.right().fold()); + if (compare != null) { + if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a </<= ? + if ((compare == 0 && bc instanceof LessThan) || // a = 2 AND a < 2 + 0 < compare) { // a = 2 AND a </<= 1 + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { // a = 2 AND a >/>= ? + if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 + compare < 0) { // a = 2 AND a >/>= 3 + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + + iter.remove(); + changed = true; + } + } + } + } + + return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; + }
+ + // combine disjunction: + // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 + // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop + // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop + // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 + private static Expression propagate(Or or) {
+ List<Expression> exps = new ArrayList<>(); + List<Equals> equals = new ArrayList<>(); // foldable right term Equals + List<NotEquals> notEquals = new ArrayList<>(); // foldable right term NotEquals + List<Range> ranges = new ArrayList<>(); + List<BinaryComparison> inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparison
+ + // split expressions by type + for (Expression ex : Predicates.splitOr(or)) { + if (ex instanceof Equals eq) { + if (eq.right().foldable()) { + equals.add(eq); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals neq) { + if (neq.right().foldable()) { + notEquals.add(neq); + } else { + exps.add(ex); + } + } else if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof BinaryComparison bc) { + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + }
+ + boolean updated = false; // has the expression been modified? + + // evaluate the impact of each Equal over the different types of Expressions + for (Iterator<Equals> iterEq = equals.iterator(); iterEq.hasNext();) { + Equals eq = iterEq.next(); + Object eqValue = eq.right().fold(); + boolean removeEquals = false;
+ + // Equals OR NotEquals + for (NotEquals neq : notEquals) { + if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // a = 2 OR a != 2 -> TRUE + return TRUE; + } else { // a = 2 OR a != 5 -> a != 5 + removeEquals = true; + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + }
+ + // Equals OR Range + for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop + Range range = ranges.get(i); + if (eq.left().semanticEquals(range.value())) { + Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; + Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; + + if (lowerComp != null && lowerComp == 0) { + if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + true, + range.upper(), + range.includeUpper(), + range.zoneId() + ) + ); + } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? + removeEquals = true; // update range with lower equality instead or simply superfluous + break; + } else if (upperComp != null && upperComp == 0) { + if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ?
< a <= 2 + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + range.includeLower(), + range.upper(), + true, + range.zoneId() + ) + ); + } // else : a = 2 OR ? < a <= 2 -> ? < a <= 2 + removeEquals = true; // update range with upper equality instead + break; + } else if (lowerComp != null && upperComp != null) { + if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 + removeEquals = true; // equality is superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + } + + // Equals OR Inequality + for (int i = 0; i < inequalities.size(); i++) { + BinaryComparison bc = inequalities.get(i); + if (eq.left().semanticEquals(bc.left())) { + Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); + if (comp != null) { + if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { + if (comp < 0) { // a = 1 OR a > 2 -> nop + continue; + } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 + inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (0 < comp || bc instanceof GreaterThanOrEqual) : + // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 + + removeEquals = true; // update range with equality instead or simply superfluous + break; + } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + if (comp > 0) { // a = 2 OR a < 1 -> nop + continue; + } + if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 + inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 + removeEquals = true; // update range with equality instead or simply superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + } + } + + return updated ? 
Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 20d06df68b12b..ed47b27924a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -10,6 +10,7 @@ null 'inlinestats' 'keep' 'limit' +'meta' 'mv_expand' 'rename' 'row' @@ -95,6 +96,9 @@ null null null 'info' +null +null +null 'functions' null null @@ -117,6 +121,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -202,10 +207,13 @@ MVEXPAND_LINE_COMMENT MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +FUNCTIONS +META_LINE_COMMENT +META_MULTILINE_COMMENT +META_WS COLON SETTING SETTING_LINE_COMMENT @@ -223,6 +231,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -355,10 +364,14 @@ MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS SHOW_PIPE INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +META_PIPE +FUNCTIONS +META_LINE_COMMENT +META_MULTILINE_COMMENT +META_WS SETTING_CLOSING_BRACKET COLON SETTING @@ -381,7 +394,8 @@ ENRICH_MODE ENRICH_FIELD_MODE MVEXPAND_MODE SHOW_MODE +META_MODE SETTING_MODE atn: -[4, 0, 104, 1147, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 
139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 4, 17, 460, 8, 17, 11, 17, 12, 17, 461, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 470, 8, 18, 10, 18, 12, 18, 473, 9, 18, 1, 18, 3, 18, 476, 8, 18, 1, 18, 3, 18, 479, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 488, 8, 19, 10, 19, 12, 19, 491, 9, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 499, 8, 20, 11, 20, 12, 20, 500, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 542, 8, 31, 1, 31, 4, 31, 545, 8, 31, 11, 31, 12, 31, 546, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 556, 8, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 563, 8, 36, 1, 37, 1, 37, 1, 37, 5, 37, 568, 8, 37, 10, 37, 12, 37, 571, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 579, 8, 37, 10, 37, 12, 37, 582, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 589, 8, 37, 1, 37, 3, 37, 592, 8, 37, 3, 37, 594, 8, 37, 1, 38, 4, 38, 597, 8, 38, 11, 38, 12, 38, 598, 1, 39, 4, 39, 602, 8, 39, 11, 39, 12, 39, 603, 1, 39, 1, 39, 5, 39, 608, 8, 39, 10, 39, 12, 39, 611, 9, 39, 1, 39, 1, 39, 4, 39, 615, 8, 39, 11, 39, 12, 39, 616, 1, 39, 4, 39, 620, 8, 39, 11, 39, 12, 39, 621, 1, 39, 1, 39, 5, 39, 626, 8, 39, 10, 39, 12, 39, 629, 9, 39, 3, 39, 631, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 4, 39, 637, 8, 39, 11, 39, 12, 39, 638, 1, 39, 1, 39, 3, 39, 643, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 771, 8, 76, 10, 76, 12, 
76, 774, 9, 76, 1, 76, 1, 76, 3, 76, 778, 8, 76, 1, 76, 4, 76, 781, 8, 76, 11, 76, 12, 76, 782, 3, 76, 785, 8, 76, 1, 77, 1, 77, 4, 77, 789, 8, 77, 11, 77, 12, 77, 790, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 3, 88, 842, 8, 88, 1, 89, 4, 89, 845, 8, 89, 11, 89, 12, 89, 846, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 3, 97, 882, 8, 97, 1, 98, 1, 98, 3, 98, 886, 8, 98, 1, 98, 5, 98, 889, 8, 98, 10, 98, 12, 98, 892, 9, 98, 1, 98, 1, 98, 3, 98, 896, 8, 98, 1, 98, 4, 98, 899, 8, 98, 11, 98, 12, 98, 900, 3, 98, 903, 8, 98, 1, 99, 1, 99, 4, 99, 907, 8, 99, 11, 99, 12, 99, 908, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 117, 4, 117, 984, 8, 117, 11, 117, 12, 117, 985, 1, 117, 1, 117, 3, 117, 990, 8, 117, 1, 117, 4, 117, 993, 8, 117, 11, 117, 12, 117, 994, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 4, 148, 1132, 8, 148, 11, 148, 12, 148, 1133, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 2, 489, 580, 0, 152, 11, 1, 13, 2, 15, 3, 17, 4, 19, 5, 21, 6, 23, 7, 25, 8, 27, 9, 29, 10, 31, 11, 33, 12, 35, 13, 37, 14, 39, 15, 41, 16, 43, 17, 45, 18, 47, 19, 49, 20, 51, 21, 53, 0, 55, 0, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 
54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 0, 167, 66, 169, 67, 171, 68, 173, 69, 175, 0, 177, 0, 179, 0, 181, 0, 183, 0, 185, 70, 187, 0, 189, 71, 191, 0, 193, 72, 195, 73, 197, 74, 199, 0, 201, 0, 203, 0, 205, 0, 207, 0, 209, 75, 211, 76, 213, 77, 215, 78, 217, 0, 219, 0, 221, 0, 223, 0, 225, 79, 227, 0, 229, 80, 231, 81, 233, 82, 235, 0, 237, 0, 239, 83, 241, 84, 243, 0, 245, 85, 247, 0, 249, 0, 251, 86, 253, 87, 255, 88, 257, 0, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 0, 271, 89, 273, 90, 275, 91, 277, 0, 279, 0, 281, 0, 283, 0, 285, 92, 287, 93, 289, 94, 291, 0, 293, 95, 295, 96, 297, 97, 299, 98, 301, 99, 303, 0, 305, 100, 307, 101, 309, 102, 311, 103, 313, 104, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1175, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 1, 53, 1, 0, 0, 0, 1, 55, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 1, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 2, 63, 1, 0, 0, 0, 2, 85, 1, 0, 0, 0, 2, 87, 1, 0, 0, 0, 2, 89, 1, 0, 0, 0, 2, 91, 1, 0, 0, 0, 2, 93, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 2, 99, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 2, 107, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 2, 115, 1, 0, 0, 0, 2, 117, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 2, 125, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 2, 129, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 2, 133, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 2, 139, 1, 0, 0, 0, 2, 141, 1, 0, 0, 0, 2, 143, 1, 0, 0, 0, 2, 145, 1, 0, 0, 0, 2, 147, 1, 0, 0, 0, 2, 149, 1, 0, 0, 0, 2, 151, 1, 0, 0, 0, 2, 153, 1, 0, 0, 0, 2, 155, 1, 0, 0, 0, 2, 157, 1, 0, 0, 0, 2, 159, 1, 0, 0, 0, 2, 161, 1, 0, 0, 0, 2, 163, 1, 0, 0, 0, 2, 167, 1, 0, 0, 0, 2, 169, 1, 0, 0, 0, 2, 171, 1, 0, 0, 0, 2, 173, 1, 0, 0, 0, 3, 175, 1, 0, 0, 0, 3, 177, 1, 0, 0, 0, 3, 179, 1, 0, 0, 0, 3, 181, 1, 0, 0, 0, 3, 183, 1, 0, 0, 0, 3, 185, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 4, 199, 1, 0, 0, 0, 4, 201, 1, 0, 0, 0, 4, 203, 1, 0, 0, 0, 4, 209, 1, 0, 0, 0, 4, 211, 1, 0, 0, 0, 4, 213, 1, 0, 0, 0, 4, 215, 1, 0, 0, 0, 5, 217, 1, 0, 0, 0, 5, 219, 1, 0, 0, 0, 5, 221, 1, 0, 0, 0, 5, 223, 1, 0, 0, 0, 5, 225, 1, 0, 0, 0, 5, 227, 1, 0, 0, 0, 5, 229, 1, 0, 0, 0, 5, 231, 1, 0, 0, 0, 5, 233, 1, 0, 0, 0, 6, 235, 1, 0, 0, 0, 6, 237, 1, 0, 0, 0, 6, 239, 1, 0, 0, 0, 6, 241, 1, 0, 0, 0, 6, 245, 1, 0, 0, 0, 6, 247, 1, 0, 0, 0, 6, 249, 1, 0, 0, 0, 6, 251, 1, 0, 0, 0, 6, 253, 1, 0, 0, 0, 6, 255, 1, 0, 0, 0, 7, 257, 1, 0, 0, 0, 7, 259, 1, 0, 0, 0, 7, 261, 1, 0, 0, 0, 7, 263, 1, 0, 0, 0, 7, 265, 1, 0, 0, 0, 7, 267, 1, 0, 0, 0, 7, 269, 1, 0, 0, 
0, 7, 271, 1, 0, 0, 0, 7, 273, 1, 0, 0, 0, 7, 275, 1, 0, 0, 0, 8, 277, 1, 0, 0, 0, 8, 279, 1, 0, 0, 0, 8, 281, 1, 0, 0, 0, 8, 283, 1, 0, 0, 0, 8, 285, 1, 0, 0, 0, 8, 287, 1, 0, 0, 0, 8, 289, 1, 0, 0, 0, 9, 291, 1, 0, 0, 0, 9, 293, 1, 0, 0, 0, 9, 295, 1, 0, 0, 0, 9, 297, 1, 0, 0, 0, 9, 299, 1, 0, 0, 0, 9, 301, 1, 0, 0, 0, 10, 303, 1, 0, 0, 0, 10, 305, 1, 0, 0, 0, 10, 307, 1, 0, 0, 0, 10, 309, 1, 0, 0, 0, 10, 311, 1, 0, 0, 0, 10, 313, 1, 0, 0, 0, 11, 315, 1, 0, 0, 0, 13, 325, 1, 0, 0, 0, 15, 332, 1, 0, 0, 0, 17, 341, 1, 0, 0, 0, 19, 348, 1, 0, 0, 0, 21, 358, 1, 0, 0, 0, 23, 365, 1, 0, 0, 0, 25, 372, 1, 0, 0, 0, 27, 386, 1, 0, 0, 0, 29, 393, 1, 0, 0, 0, 31, 401, 1, 0, 0, 0, 33, 413, 1, 0, 0, 0, 35, 422, 1, 0, 0, 0, 37, 428, 1, 0, 0, 0, 39, 435, 1, 0, 0, 0, 41, 442, 1, 0, 0, 0, 43, 450, 1, 0, 0, 0, 45, 459, 1, 0, 0, 0, 47, 465, 1, 0, 0, 0, 49, 482, 1, 0, 0, 0, 51, 498, 1, 0, 0, 0, 53, 504, 1, 0, 0, 0, 55, 509, 1, 0, 0, 0, 57, 514, 1, 0, 0, 0, 59, 518, 1, 0, 0, 0, 61, 522, 1, 0, 0, 0, 63, 526, 1, 0, 0, 0, 65, 530, 1, 0, 0, 0, 67, 532, 1, 0, 0, 0, 69, 534, 1, 0, 0, 0, 71, 537, 1, 0, 0, 0, 73, 539, 1, 0, 0, 0, 75, 548, 1, 0, 0, 0, 77, 550, 1, 0, 0, 0, 79, 555, 1, 0, 0, 0, 81, 557, 1, 0, 0, 0, 83, 562, 1, 0, 0, 0, 85, 593, 1, 0, 0, 0, 87, 596, 1, 0, 0, 0, 89, 642, 1, 0, 0, 0, 91, 644, 1, 0, 0, 0, 93, 647, 1, 0, 0, 0, 95, 651, 1, 0, 0, 0, 97, 655, 1, 0, 0, 0, 99, 657, 1, 0, 0, 0, 101, 659, 1, 0, 0, 0, 103, 664, 1, 0, 0, 0, 105, 666, 1, 0, 0, 0, 107, 672, 1, 0, 0, 0, 109, 678, 1, 0, 0, 0, 111, 683, 1, 0, 0, 0, 113, 685, 1, 0, 0, 0, 115, 688, 1, 0, 0, 0, 117, 691, 1, 0, 0, 0, 119, 696, 1, 0, 0, 0, 121, 700, 1, 0, 0, 0, 123, 705, 1, 0, 0, 0, 125, 711, 1, 0, 0, 0, 127, 714, 1, 0, 0, 0, 129, 716, 1, 0, 0, 0, 131, 722, 1, 0, 0, 0, 133, 724, 1, 0, 0, 0, 135, 729, 1, 0, 0, 0, 137, 732, 1, 0, 0, 0, 139, 735, 1, 0, 0, 0, 141, 738, 1, 0, 0, 0, 143, 740, 1, 0, 0, 0, 145, 743, 1, 0, 0, 0, 147, 745, 1, 0, 0, 0, 149, 748, 1, 0, 0, 0, 151, 750, 1, 0, 0, 0, 153, 752, 1, 0, 0, 0, 155, 754, 1, 0, 0, 0, 157, 756, 1, 0, 0, 0, 159, 758, 1, 0, 0, 0, 161, 763, 1, 0, 0, 0, 163, 784, 1, 0, 0, 0, 165, 786, 1, 0, 0, 0, 167, 794, 1, 0, 0, 0, 169, 796, 1, 0, 0, 0, 171, 800, 1, 0, 0, 0, 173, 804, 1, 0, 0, 0, 175, 808, 1, 0, 0, 0, 177, 813, 1, 0, 0, 0, 179, 817, 1, 0, 0, 0, 181, 821, 1, 0, 0, 0, 183, 825, 1, 0, 0, 0, 185, 829, 1, 0, 0, 0, 187, 841, 1, 0, 0, 0, 189, 844, 1, 0, 0, 0, 191, 848, 1, 0, 0, 0, 193, 852, 1, 0, 0, 0, 195, 856, 1, 0, 0, 0, 197, 860, 1, 0, 0, 0, 199, 864, 1, 0, 0, 0, 201, 869, 1, 0, 0, 0, 203, 873, 1, 0, 0, 0, 205, 881, 1, 0, 0, 0, 207, 902, 1, 0, 0, 0, 209, 906, 1, 0, 0, 0, 211, 910, 1, 0, 0, 0, 213, 914, 1, 0, 0, 0, 215, 918, 1, 0, 0, 0, 217, 922, 1, 0, 0, 0, 219, 927, 1, 0, 0, 0, 221, 931, 1, 0, 0, 0, 223, 935, 1, 0, 0, 0, 225, 939, 1, 0, 0, 0, 227, 942, 1, 0, 0, 0, 229, 946, 1, 0, 0, 0, 231, 950, 1, 0, 0, 0, 233, 954, 1, 0, 0, 0, 235, 958, 1, 0, 0, 0, 237, 963, 1, 0, 0, 0, 239, 968, 1, 0, 0, 0, 241, 973, 1, 0, 0, 0, 243, 980, 1, 0, 0, 0, 245, 989, 1, 0, 0, 0, 247, 996, 1, 0, 0, 0, 249, 1000, 1, 0, 0, 0, 251, 1004, 1, 0, 0, 0, 253, 1008, 1, 0, 0, 0, 255, 1012, 1, 0, 0, 0, 257, 1016, 1, 0, 0, 0, 259, 1022, 1, 0, 0, 0, 261, 1026, 1, 0, 0, 0, 263, 1030, 1, 0, 0, 0, 265, 1034, 1, 0, 0, 0, 267, 1038, 1, 0, 0, 0, 269, 1042, 1, 0, 0, 0, 271, 1046, 1, 0, 0, 0, 273, 1050, 1, 0, 0, 0, 275, 1054, 1, 0, 0, 0, 277, 1058, 1, 0, 0, 0, 279, 1063, 1, 0, 0, 0, 281, 1067, 1, 0, 0, 0, 283, 1071, 1, 0, 0, 0, 285, 1075, 1, 0, 0, 0, 287, 1079, 1, 0, 0, 0, 289, 1083, 1, 0, 0, 0, 291, 1087, 1, 0, 0, 0, 293, 1092, 1, 0, 0, 0, 
295, 1097, 1, 0, 0, 0, 297, 1107, 1, 0, 0, 0, 299, 1111, 1, 0, 0, 0, 301, 1115, 1, 0, 0, 0, 303, 1119, 1, 0, 0, 0, 305, 1124, 1, 0, 0, 0, 307, 1131, 1, 0, 0, 0, 309, 1135, 1, 0, 0, 0, 311, 1139, 1, 0, 0, 0, 313, 1143, 1, 0, 0, 0, 315, 316, 5, 100, 0, 0, 316, 317, 5, 105, 0, 0, 317, 318, 5, 115, 0, 0, 318, 319, 5, 115, 0, 0, 319, 320, 5, 101, 0, 0, 320, 321, 5, 99, 0, 0, 321, 322, 5, 116, 0, 0, 322, 323, 1, 0, 0, 0, 323, 324, 6, 0, 0, 0, 324, 12, 1, 0, 0, 0, 325, 326, 5, 100, 0, 0, 326, 327, 5, 114, 0, 0, 327, 328, 5, 111, 0, 0, 328, 329, 5, 112, 0, 0, 329, 330, 1, 0, 0, 0, 330, 331, 6, 1, 1, 0, 331, 14, 1, 0, 0, 0, 332, 333, 5, 101, 0, 0, 333, 334, 5, 110, 0, 0, 334, 335, 5, 114, 0, 0, 335, 336, 5, 105, 0, 0, 336, 337, 5, 99, 0, 0, 337, 338, 5, 104, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 6, 2, 2, 0, 340, 16, 1, 0, 0, 0, 341, 342, 5, 101, 0, 0, 342, 343, 5, 118, 0, 0, 343, 344, 5, 97, 0, 0, 344, 345, 5, 108, 0, 0, 345, 346, 1, 0, 0, 0, 346, 347, 6, 3, 0, 0, 347, 18, 1, 0, 0, 0, 348, 349, 5, 101, 0, 0, 349, 350, 5, 120, 0, 0, 350, 351, 5, 112, 0, 0, 351, 352, 5, 108, 0, 0, 352, 353, 5, 97, 0, 0, 353, 354, 5, 105, 0, 0, 354, 355, 5, 110, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 4, 3, 0, 357, 20, 1, 0, 0, 0, 358, 359, 5, 102, 0, 0, 359, 360, 5, 114, 0, 0, 360, 361, 5, 111, 0, 0, 361, 362, 5, 109, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 5, 4, 0, 364, 22, 1, 0, 0, 0, 365, 366, 5, 103, 0, 0, 366, 367, 5, 114, 0, 0, 367, 368, 5, 111, 0, 0, 368, 369, 5, 107, 0, 0, 369, 370, 1, 0, 0, 0, 370, 371, 6, 6, 0, 0, 371, 24, 1, 0, 0, 0, 372, 373, 5, 105, 0, 0, 373, 374, 5, 110, 0, 0, 374, 375, 5, 108, 0, 0, 375, 376, 5, 105, 0, 0, 376, 377, 5, 110, 0, 0, 377, 378, 5, 101, 0, 0, 378, 379, 5, 115, 0, 0, 379, 380, 5, 116, 0, 0, 380, 381, 5, 97, 0, 0, 381, 382, 5, 116, 0, 0, 382, 383, 5, 115, 0, 0, 383, 384, 1, 0, 0, 0, 384, 385, 6, 7, 0, 0, 385, 26, 1, 0, 0, 0, 386, 387, 5, 107, 0, 0, 387, 388, 5, 101, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 112, 0, 0, 390, 391, 1, 0, 0, 0, 391, 392, 6, 8, 1, 0, 392, 28, 1, 0, 0, 0, 393, 394, 5, 108, 0, 0, 394, 395, 5, 105, 0, 0, 395, 396, 5, 109, 0, 0, 396, 397, 5, 105, 0, 0, 397, 398, 5, 116, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 9, 0, 0, 400, 30, 1, 0, 0, 0, 401, 402, 5, 109, 0, 0, 402, 403, 5, 118, 0, 0, 403, 404, 5, 95, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 120, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 5, 97, 0, 0, 408, 409, 5, 110, 0, 0, 409, 410, 5, 100, 0, 0, 410, 411, 1, 0, 0, 0, 411, 412, 6, 10, 5, 0, 412, 32, 1, 0, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 101, 0, 0, 415, 416, 5, 110, 0, 0, 416, 417, 5, 97, 0, 0, 417, 418, 5, 109, 0, 0, 418, 419, 5, 101, 0, 0, 419, 420, 1, 0, 0, 0, 420, 421, 6, 11, 6, 0, 421, 34, 1, 0, 0, 0, 422, 423, 5, 114, 0, 0, 423, 424, 5, 111, 0, 0, 424, 425, 5, 119, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 12, 0, 0, 427, 36, 1, 0, 0, 0, 428, 429, 5, 115, 0, 0, 429, 430, 5, 104, 0, 0, 430, 431, 5, 111, 0, 0, 431, 432, 5, 119, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 13, 7, 0, 434, 38, 1, 0, 0, 0, 435, 436, 5, 115, 0, 0, 436, 437, 5, 111, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 116, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 14, 0, 0, 441, 40, 1, 0, 0, 0, 442, 443, 5, 115, 0, 0, 443, 444, 5, 116, 0, 0, 444, 445, 5, 97, 0, 0, 445, 446, 5, 116, 0, 0, 446, 447, 5, 115, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 15, 0, 0, 449, 42, 1, 0, 0, 0, 450, 451, 5, 119, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 101, 0, 0, 453, 454, 5, 114, 0, 0, 454, 455, 5, 101, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 6, 16, 0, 0, 457, 44, 1, 0, 0, 0, 458, 460, 8, 
0, 0, 0, 459, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 17, 0, 0, 464, 46, 1, 0, 0, 0, 465, 466, 5, 47, 0, 0, 466, 467, 5, 47, 0, 0, 467, 471, 1, 0, 0, 0, 468, 470, 8, 1, 0, 0, 469, 468, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 476, 5, 13, 0, 0, 475, 474, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 478, 1, 0, 0, 0, 477, 479, 5, 10, 0, 0, 478, 477, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 18, 8, 0, 481, 48, 1, 0, 0, 0, 482, 483, 5, 47, 0, 0, 483, 484, 5, 42, 0, 0, 484, 489, 1, 0, 0, 0, 485, 488, 3, 49, 19, 0, 486, 488, 9, 0, 0, 0, 487, 485, 1, 0, 0, 0, 487, 486, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 490, 492, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 493, 5, 42, 0, 0, 493, 494, 5, 47, 0, 0, 494, 495, 1, 0, 0, 0, 495, 496, 6, 19, 8, 0, 496, 50, 1, 0, 0, 0, 497, 499, 7, 2, 0, 0, 498, 497, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 20, 8, 0, 503, 52, 1, 0, 0, 0, 504, 505, 3, 159, 74, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 21, 9, 0, 507, 508, 6, 21, 10, 0, 508, 54, 1, 0, 0, 0, 509, 510, 3, 63, 26, 0, 510, 511, 1, 0, 0, 0, 511, 512, 6, 22, 11, 0, 512, 513, 6, 22, 12, 0, 513, 56, 1, 0, 0, 0, 514, 515, 3, 51, 20, 0, 515, 516, 1, 0, 0, 0, 516, 517, 6, 23, 8, 0, 517, 58, 1, 0, 0, 0, 518, 519, 3, 47, 18, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 24, 8, 0, 521, 60, 1, 0, 0, 0, 522, 523, 3, 49, 19, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 25, 8, 0, 525, 62, 1, 0, 0, 0, 526, 527, 5, 124, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 26, 12, 0, 529, 64, 1, 0, 0, 0, 530, 531, 7, 3, 0, 0, 531, 66, 1, 0, 0, 0, 532, 533, 7, 4, 0, 0, 533, 68, 1, 0, 0, 0, 534, 535, 5, 92, 0, 0, 535, 536, 7, 5, 0, 0, 536, 70, 1, 0, 0, 0, 537, 538, 8, 6, 0, 0, 538, 72, 1, 0, 0, 0, 539, 541, 7, 7, 0, 0, 540, 542, 7, 8, 0, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 544, 1, 0, 0, 0, 543, 545, 3, 65, 27, 0, 544, 543, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 74, 1, 0, 0, 0, 548, 549, 5, 64, 0, 0, 549, 76, 1, 0, 0, 0, 550, 551, 5, 96, 0, 0, 551, 78, 1, 0, 0, 0, 552, 556, 8, 9, 0, 0, 553, 554, 5, 96, 0, 0, 554, 556, 5, 96, 0, 0, 555, 552, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 80, 1, 0, 0, 0, 557, 558, 5, 95, 0, 0, 558, 82, 1, 0, 0, 0, 559, 563, 3, 67, 28, 0, 560, 563, 3, 65, 27, 0, 561, 563, 3, 81, 35, 0, 562, 559, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 561, 1, 0, 0, 0, 563, 84, 1, 0, 0, 0, 564, 569, 5, 34, 0, 0, 565, 568, 3, 69, 29, 0, 566, 568, 3, 71, 30, 0, 567, 565, 1, 0, 0, 0, 567, 566, 1, 0, 0, 0, 568, 571, 1, 0, 0, 0, 569, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 572, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 594, 5, 34, 0, 0, 573, 574, 5, 34, 0, 0, 574, 575, 5, 34, 0, 0, 575, 576, 5, 34, 0, 0, 576, 580, 1, 0, 0, 0, 577, 579, 8, 1, 0, 0, 578, 577, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 581, 583, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 584, 5, 34, 0, 0, 584, 585, 5, 34, 0, 0, 585, 586, 5, 34, 0, 0, 586, 588, 1, 0, 0, 0, 587, 589, 5, 34, 0, 0, 588, 587, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 592, 5, 34, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 564, 1, 0, 0, 0, 593, 573, 1, 0, 0, 0, 594, 86, 1, 0, 0, 0, 595, 597, 3, 65, 27, 0, 596, 595, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 
599, 88, 1, 0, 0, 0, 600, 602, 3, 65, 27, 0, 601, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 609, 3, 103, 46, 0, 606, 608, 3, 65, 27, 0, 607, 606, 1, 0, 0, 0, 608, 611, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 643, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 612, 614, 3, 103, 46, 0, 613, 615, 3, 65, 27, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 643, 1, 0, 0, 0, 618, 620, 3, 65, 27, 0, 619, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 630, 1, 0, 0, 0, 623, 627, 3, 103, 46, 0, 624, 626, 3, 65, 27, 0, 625, 624, 1, 0, 0, 0, 626, 629, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 623, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 3, 73, 31, 0, 633, 643, 1, 0, 0, 0, 634, 636, 3, 103, 46, 0, 635, 637, 3, 65, 27, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 3, 73, 31, 0, 641, 643, 1, 0, 0, 0, 642, 601, 1, 0, 0, 0, 642, 612, 1, 0, 0, 0, 642, 619, 1, 0, 0, 0, 642, 634, 1, 0, 0, 0, 643, 90, 1, 0, 0, 0, 644, 645, 5, 98, 0, 0, 645, 646, 5, 121, 0, 0, 646, 92, 1, 0, 0, 0, 647, 648, 5, 97, 0, 0, 648, 649, 5, 110, 0, 0, 649, 650, 5, 100, 0, 0, 650, 94, 1, 0, 0, 0, 651, 652, 5, 97, 0, 0, 652, 653, 5, 115, 0, 0, 653, 654, 5, 99, 0, 0, 654, 96, 1, 0, 0, 0, 655, 656, 5, 61, 0, 0, 656, 98, 1, 0, 0, 0, 657, 658, 5, 44, 0, 0, 658, 100, 1, 0, 0, 0, 659, 660, 5, 100, 0, 0, 660, 661, 5, 101, 0, 0, 661, 662, 5, 115, 0, 0, 662, 663, 5, 99, 0, 0, 663, 102, 1, 0, 0, 0, 664, 665, 5, 46, 0, 0, 665, 104, 1, 0, 0, 0, 666, 667, 5, 102, 0, 0, 667, 668, 5, 97, 0, 0, 668, 669, 5, 108, 0, 0, 669, 670, 5, 115, 0, 0, 670, 671, 5, 101, 0, 0, 671, 106, 1, 0, 0, 0, 672, 673, 5, 102, 0, 0, 673, 674, 5, 105, 0, 0, 674, 675, 5, 114, 0, 0, 675, 676, 5, 115, 0, 0, 676, 677, 5, 116, 0, 0, 677, 108, 1, 0, 0, 0, 678, 679, 5, 108, 0, 0, 679, 680, 5, 97, 0, 0, 680, 681, 5, 115, 0, 0, 681, 682, 5, 116, 0, 0, 682, 110, 1, 0, 0, 0, 683, 684, 5, 40, 0, 0, 684, 112, 1, 0, 0, 0, 685, 686, 5, 105, 0, 0, 686, 687, 5, 110, 0, 0, 687, 114, 1, 0, 0, 0, 688, 689, 5, 105, 0, 0, 689, 690, 5, 115, 0, 0, 690, 116, 1, 0, 0, 0, 691, 692, 5, 108, 0, 0, 692, 693, 5, 105, 0, 0, 693, 694, 5, 107, 0, 0, 694, 695, 5, 101, 0, 0, 695, 118, 1, 0, 0, 0, 696, 697, 5, 110, 0, 0, 697, 698, 5, 111, 0, 0, 698, 699, 5, 116, 0, 0, 699, 120, 1, 0, 0, 0, 700, 701, 5, 110, 0, 0, 701, 702, 5, 117, 0, 0, 702, 703, 5, 108, 0, 0, 703, 704, 5, 108, 0, 0, 704, 122, 1, 0, 0, 0, 705, 706, 5, 110, 0, 0, 706, 707, 5, 117, 0, 0, 707, 708, 5, 108, 0, 0, 708, 709, 5, 108, 0, 0, 709, 710, 5, 115, 0, 0, 710, 124, 1, 0, 0, 0, 711, 712, 5, 111, 0, 0, 712, 713, 5, 114, 0, 0, 713, 126, 1, 0, 0, 0, 714, 715, 5, 63, 0, 0, 715, 128, 1, 0, 0, 0, 716, 717, 5, 114, 0, 0, 717, 718, 5, 108, 0, 0, 718, 719, 5, 105, 0, 0, 719, 720, 5, 107, 0, 0, 720, 721, 5, 101, 0, 0, 721, 130, 1, 0, 0, 0, 722, 723, 5, 41, 0, 0, 723, 132, 1, 0, 0, 0, 724, 725, 5, 116, 0, 0, 725, 726, 5, 114, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 101, 0, 0, 728, 134, 1, 0, 0, 0, 729, 730, 5, 61, 0, 0, 730, 731, 5, 61, 0, 0, 731, 136, 1, 0, 0, 0, 732, 733, 5, 61, 0, 0, 733, 734, 5, 126, 0, 0, 734, 138, 1, 0, 0, 0, 735, 736, 5, 33, 0, 0, 736, 737, 5, 61, 0, 0, 737, 140, 1, 0, 0, 0, 738, 739, 5, 60, 0, 0, 739, 142, 1, 0, 0, 0, 740, 741, 5, 60, 0, 0, 741, 742, 5, 61, 0, 0, 742, 144, 1, 0, 0, 0, 743, 744, 5, 62, 0, 
0, 744, 146, 1, 0, 0, 0, 745, 746, 5, 62, 0, 0, 746, 747, 5, 61, 0, 0, 747, 148, 1, 0, 0, 0, 748, 749, 5, 43, 0, 0, 749, 150, 1, 0, 0, 0, 750, 751, 5, 45, 0, 0, 751, 152, 1, 0, 0, 0, 752, 753, 5, 42, 0, 0, 753, 154, 1, 0, 0, 0, 754, 755, 5, 47, 0, 0, 755, 156, 1, 0, 0, 0, 756, 757, 5, 37, 0, 0, 757, 158, 1, 0, 0, 0, 758, 759, 5, 91, 0, 0, 759, 760, 1, 0, 0, 0, 760, 761, 6, 74, 0, 0, 761, 762, 6, 74, 0, 0, 762, 160, 1, 0, 0, 0, 763, 764, 5, 93, 0, 0, 764, 765, 1, 0, 0, 0, 765, 766, 6, 75, 12, 0, 766, 767, 6, 75, 12, 0, 767, 162, 1, 0, 0, 0, 768, 772, 3, 67, 28, 0, 769, 771, 3, 83, 36, 0, 770, 769, 1, 0, 0, 0, 771, 774, 1, 0, 0, 0, 772, 770, 1, 0, 0, 0, 772, 773, 1, 0, 0, 0, 773, 785, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 775, 778, 3, 81, 35, 0, 776, 778, 3, 75, 32, 0, 777, 775, 1, 0, 0, 0, 777, 776, 1, 0, 0, 0, 778, 780, 1, 0, 0, 0, 779, 781, 3, 83, 36, 0, 780, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 780, 1, 0, 0, 0, 782, 783, 1, 0, 0, 0, 783, 785, 1, 0, 0, 0, 784, 768, 1, 0, 0, 0, 784, 777, 1, 0, 0, 0, 785, 164, 1, 0, 0, 0, 786, 788, 3, 77, 33, 0, 787, 789, 3, 79, 34, 0, 788, 787, 1, 0, 0, 0, 789, 790, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 3, 77, 33, 0, 793, 166, 1, 0, 0, 0, 794, 795, 3, 165, 77, 0, 795, 168, 1, 0, 0, 0, 796, 797, 3, 47, 18, 0, 797, 798, 1, 0, 0, 0, 798, 799, 6, 79, 8, 0, 799, 170, 1, 0, 0, 0, 800, 801, 3, 49, 19, 0, 801, 802, 1, 0, 0, 0, 802, 803, 6, 80, 8, 0, 803, 172, 1, 0, 0, 0, 804, 805, 3, 51, 20, 0, 805, 806, 1, 0, 0, 0, 806, 807, 6, 81, 8, 0, 807, 174, 1, 0, 0, 0, 808, 809, 3, 63, 26, 0, 809, 810, 1, 0, 0, 0, 810, 811, 6, 82, 11, 0, 811, 812, 6, 82, 12, 0, 812, 176, 1, 0, 0, 0, 813, 814, 3, 159, 74, 0, 814, 815, 1, 0, 0, 0, 815, 816, 6, 83, 9, 0, 816, 178, 1, 0, 0, 0, 817, 818, 3, 161, 75, 0, 818, 819, 1, 0, 0, 0, 819, 820, 6, 84, 13, 0, 820, 180, 1, 0, 0, 0, 821, 822, 3, 99, 44, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 85, 14, 0, 824, 182, 1, 0, 0, 0, 825, 826, 3, 97, 43, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 86, 15, 0, 828, 184, 1, 0, 0, 0, 829, 830, 5, 109, 0, 0, 830, 831, 5, 101, 0, 0, 831, 832, 5, 116, 0, 0, 832, 833, 5, 97, 0, 0, 833, 834, 5, 100, 0, 0, 834, 835, 5, 97, 0, 0, 835, 836, 5, 116, 0, 0, 836, 837, 5, 97, 0, 0, 837, 186, 1, 0, 0, 0, 838, 842, 8, 10, 0, 0, 839, 840, 5, 47, 0, 0, 840, 842, 8, 11, 0, 0, 841, 838, 1, 0, 0, 0, 841, 839, 1, 0, 0, 0, 842, 188, 1, 0, 0, 0, 843, 845, 3, 187, 88, 0, 844, 843, 1, 0, 0, 0, 845, 846, 1, 0, 0, 0, 846, 844, 1, 0, 0, 0, 846, 847, 1, 0, 0, 0, 847, 190, 1, 0, 0, 0, 848, 849, 3, 167, 78, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 90, 16, 0, 851, 192, 1, 0, 0, 0, 852, 853, 3, 47, 18, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 91, 8, 0, 855, 194, 1, 0, 0, 0, 856, 857, 3, 49, 19, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 92, 8, 0, 859, 196, 1, 0, 0, 0, 860, 861, 3, 51, 20, 0, 861, 862, 1, 0, 0, 0, 862, 863, 6, 93, 8, 0, 863, 198, 1, 0, 0, 0, 864, 865, 3, 63, 26, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 94, 11, 0, 867, 868, 6, 94, 12, 0, 868, 200, 1, 0, 0, 0, 869, 870, 3, 103, 46, 0, 870, 871, 1, 0, 0, 0, 871, 872, 6, 95, 17, 0, 872, 202, 1, 0, 0, 0, 873, 874, 3, 99, 44, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 96, 14, 0, 876, 204, 1, 0, 0, 0, 877, 882, 3, 67, 28, 0, 878, 882, 3, 65, 27, 0, 879, 882, 3, 81, 35, 0, 880, 882, 3, 153, 71, 0, 881, 877, 1, 0, 0, 0, 881, 878, 1, 0, 0, 0, 881, 879, 1, 0, 0, 0, 881, 880, 1, 0, 0, 0, 882, 206, 1, 0, 0, 0, 883, 886, 3, 67, 28, 0, 884, 886, 3, 153, 71, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 890, 1, 0, 0, 0, 887, 889, 3, 205, 97, 0, 888, 887, 
1, 0, 0, 0, 889, 892, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 903, 1, 0, 0, 0, 892, 890, 1, 0, 0, 0, 893, 896, 3, 81, 35, 0, 894, 896, 3, 75, 32, 0, 895, 893, 1, 0, 0, 0, 895, 894, 1, 0, 0, 0, 896, 898, 1, 0, 0, 0, 897, 899, 3, 205, 97, 0, 898, 897, 1, 0, 0, 0, 899, 900, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 903, 1, 0, 0, 0, 902, 885, 1, 0, 0, 0, 902, 895, 1, 0, 0, 0, 903, 208, 1, 0, 0, 0, 904, 907, 3, 207, 98, 0, 905, 907, 3, 165, 77, 0, 906, 904, 1, 0, 0, 0, 906, 905, 1, 0, 0, 0, 907, 908, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 210, 1, 0, 0, 0, 910, 911, 3, 47, 18, 0, 911, 912, 1, 0, 0, 0, 912, 913, 6, 100, 8, 0, 913, 212, 1, 0, 0, 0, 914, 915, 3, 49, 19, 0, 915, 916, 1, 0, 0, 0, 916, 917, 6, 101, 8, 0, 917, 214, 1, 0, 0, 0, 918, 919, 3, 51, 20, 0, 919, 920, 1, 0, 0, 0, 920, 921, 6, 102, 8, 0, 921, 216, 1, 0, 0, 0, 922, 923, 3, 63, 26, 0, 923, 924, 1, 0, 0, 0, 924, 925, 6, 103, 11, 0, 925, 926, 6, 103, 12, 0, 926, 218, 1, 0, 0, 0, 927, 928, 3, 97, 43, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 104, 15, 0, 930, 220, 1, 0, 0, 0, 931, 932, 3, 99, 44, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 105, 14, 0, 934, 222, 1, 0, 0, 0, 935, 936, 3, 103, 46, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 106, 17, 0, 938, 224, 1, 0, 0, 0, 939, 940, 5, 97, 0, 0, 940, 941, 5, 115, 0, 0, 941, 226, 1, 0, 0, 0, 942, 943, 3, 209, 99, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 108, 18, 0, 945, 228, 1, 0, 0, 0, 946, 947, 3, 47, 18, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 109, 8, 0, 949, 230, 1, 0, 0, 0, 950, 951, 3, 49, 19, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 110, 8, 0, 953, 232, 1, 0, 0, 0, 954, 955, 3, 51, 20, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 111, 8, 0, 957, 234, 1, 0, 0, 0, 958, 959, 3, 63, 26, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 112, 11, 0, 961, 962, 6, 112, 12, 0, 962, 236, 1, 0, 0, 0, 963, 964, 3, 159, 74, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 113, 9, 0, 966, 967, 6, 113, 19, 0, 967, 238, 1, 0, 0, 0, 968, 969, 5, 111, 0, 0, 969, 970, 5, 110, 0, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 114, 20, 0, 972, 240, 1, 0, 0, 0, 973, 974, 5, 119, 0, 0, 974, 975, 5, 105, 0, 0, 975, 976, 5, 116, 0, 0, 976, 977, 5, 104, 0, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 115, 20, 0, 979, 242, 1, 0, 0, 0, 980, 981, 8, 12, 0, 0, 981, 244, 1, 0, 0, 0, 982, 984, 3, 243, 116, 0, 983, 982, 1, 0, 0, 0, 984, 985, 1, 0, 0, 0, 985, 983, 1, 0, 0, 0, 985, 986, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 988, 3, 305, 147, 0, 988, 990, 1, 0, 0, 0, 989, 983, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 992, 1, 0, 0, 0, 991, 993, 3, 243, 116, 0, 992, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 246, 1, 0, 0, 0, 996, 997, 3, 167, 78, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 118, 16, 0, 999, 248, 1, 0, 0, 0, 1000, 1001, 3, 245, 117, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 119, 21, 0, 1003, 250, 1, 0, 0, 0, 1004, 1005, 3, 47, 18, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 120, 8, 0, 1007, 252, 1, 0, 0, 0, 1008, 1009, 3, 49, 19, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 121, 8, 0, 1011, 254, 1, 0, 0, 0, 1012, 1013, 3, 51, 20, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 122, 8, 0, 1015, 256, 1, 0, 0, 0, 1016, 1017, 3, 63, 26, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 123, 11, 0, 1019, 1020, 6, 123, 12, 0, 1020, 1021, 6, 123, 12, 0, 1021, 258, 1, 0, 0, 0, 1022, 1023, 3, 97, 43, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 124, 15, 0, 1025, 260, 1, 0, 0, 0, 1026, 1027, 3, 99, 44, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 125, 14, 0, 1029, 262, 1, 0, 0, 0, 1030, 1031, 3, 103, 46, 0, 
1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 126, 17, 0, 1033, 264, 1, 0, 0, 0, 1034, 1035, 3, 241, 115, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 127, 22, 0, 1037, 266, 1, 0, 0, 0, 1038, 1039, 3, 209, 99, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 128, 18, 0, 1041, 268, 1, 0, 0, 0, 1042, 1043, 3, 167, 78, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 129, 16, 0, 1045, 270, 1, 0, 0, 0, 1046, 1047, 3, 47, 18, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 130, 8, 0, 1049, 272, 1, 0, 0, 0, 1050, 1051, 3, 49, 19, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 131, 8, 0, 1053, 274, 1, 0, 0, 0, 1054, 1055, 3, 51, 20, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 132, 8, 0, 1057, 276, 1, 0, 0, 0, 1058, 1059, 3, 63, 26, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 133, 11, 0, 1061, 1062, 6, 133, 12, 0, 1062, 278, 1, 0, 0, 0, 1063, 1064, 3, 103, 46, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 134, 17, 0, 1066, 280, 1, 0, 0, 0, 1067, 1068, 3, 167, 78, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 135, 16, 0, 1070, 282, 1, 0, 0, 0, 1071, 1072, 3, 163, 76, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 136, 23, 0, 1074, 284, 1, 0, 0, 0, 1075, 1076, 3, 47, 18, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 137, 8, 0, 1078, 286, 1, 0, 0, 0, 1079, 1080, 3, 49, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 138, 8, 0, 1082, 288, 1, 0, 0, 0, 1083, 1084, 3, 51, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 139, 8, 0, 1086, 290, 1, 0, 0, 0, 1087, 1088, 3, 63, 26, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 140, 11, 0, 1090, 1091, 6, 140, 12, 0, 1091, 292, 1, 0, 0, 0, 1092, 1093, 5, 105, 0, 0, 1093, 1094, 5, 110, 0, 0, 1094, 1095, 5, 102, 0, 0, 1095, 1096, 5, 111, 0, 0, 1096, 294, 1, 0, 0, 0, 1097, 1098, 5, 102, 0, 0, 1098, 1099, 5, 117, 0, 0, 1099, 1100, 5, 110, 0, 0, 1100, 1101, 5, 99, 0, 0, 1101, 1102, 5, 116, 0, 0, 1102, 1103, 5, 105, 0, 0, 1103, 1104, 5, 111, 0, 0, 1104, 1105, 5, 110, 0, 0, 1105, 1106, 5, 115, 0, 0, 1106, 296, 1, 0, 0, 0, 1107, 1108, 3, 47, 18, 0, 1108, 1109, 1, 0, 0, 0, 1109, 1110, 6, 143, 8, 0, 1110, 298, 1, 0, 0, 0, 1111, 1112, 3, 49, 19, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 144, 8, 0, 1114, 300, 1, 0, 0, 0, 1115, 1116, 3, 51, 20, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 145, 8, 0, 1118, 302, 1, 0, 0, 0, 1119, 1120, 3, 161, 75, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 146, 13, 0, 1122, 1123, 6, 146, 12, 0, 1123, 304, 1, 0, 0, 0, 1124, 1125, 5, 58, 0, 0, 1125, 306, 1, 0, 0, 0, 1126, 1132, 3, 75, 32, 0, 1127, 1132, 3, 65, 27, 0, 1128, 1132, 3, 103, 46, 0, 1129, 1132, 3, 67, 28, 0, 1130, 1132, 3, 81, 35, 0, 1131, 1126, 1, 0, 0, 0, 1131, 1127, 1, 0, 0, 0, 1131, 1128, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1130, 1, 0, 0, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1134, 1, 0, 0, 0, 1134, 308, 1, 0, 0, 0, 1135, 1136, 3, 47, 18, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 149, 8, 0, 1138, 310, 1, 0, 0, 0, 1139, 1140, 3, 49, 19, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 150, 8, 0, 1142, 312, 1, 0, 0, 0, 1143, 1144, 3, 51, 20, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 151, 8, 0, 1146, 314, 1, 0, 0, 0, 57, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 461, 471, 475, 478, 487, 489, 500, 541, 546, 555, 562, 567, 569, 580, 588, 591, 593, 598, 603, 609, 616, 621, 627, 630, 638, 642, 772, 777, 782, 784, 790, 841, 846, 881, 885, 890, 895, 900, 902, 906, 908, 985, 989, 994, 1131, 1133, 24, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 25, 0, 4, 0, 0, 7, 64, 0, 7, 33, 0, 7, 32, 0, 7, 66, 0, 7, 35, 0, 7, 75, 0, 5, 10, 0, 5, 7, 0, 7, 85, 0, 7, 84, 0, 7, 65, 0] \ No newline at end of 
file +[4, 0, 108, 1182, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 478, 8, 18, 11, 18, 12, 18, 479, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 488, 8, 19, 10, 19, 12, 19, 491, 9, 19, 1, 19, 3, 19, 494, 8, 19, 1, 19, 3, 19, 497, 8, 19, 1, 
19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 506, 8, 20, 10, 20, 12, 20, 509, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 517, 8, 21, 11, 21, 12, 21, 518, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 560, 8, 32, 1, 32, 4, 32, 563, 8, 32, 11, 32, 12, 32, 564, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 574, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 581, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 586, 8, 38, 10, 38, 12, 38, 589, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 597, 8, 38, 10, 38, 12, 38, 600, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 607, 8, 38, 1, 38, 3, 38, 610, 8, 38, 3, 38, 612, 8, 38, 1, 39, 4, 39, 615, 8, 39, 11, 39, 12, 39, 616, 1, 40, 4, 40, 620, 8, 40, 11, 40, 12, 40, 621, 1, 40, 1, 40, 5, 40, 626, 8, 40, 10, 40, 12, 40, 629, 9, 40, 1, 40, 1, 40, 4, 40, 633, 8, 40, 11, 40, 12, 40, 634, 1, 40, 4, 40, 638, 8, 40, 11, 40, 12, 40, 639, 1, 40, 1, 40, 5, 40, 644, 8, 40, 10, 40, 12, 40, 647, 9, 40, 3, 40, 649, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 655, 8, 40, 11, 40, 12, 40, 656, 1, 40, 1, 40, 3, 40, 661, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 789, 8, 77, 10, 77, 12, 77, 792, 9, 77, 1, 77, 1, 77, 3, 77, 796, 8, 77, 1, 77, 4, 77, 799, 8, 77, 11, 77, 12, 77, 800, 3, 77, 803, 8, 77, 1, 78, 1, 78, 4, 78, 807, 8, 78, 11, 78, 12, 78, 808, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 3, 89, 860, 8, 89, 1, 90, 4, 90, 863, 8, 90, 11, 90, 12, 90, 864, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 3, 98, 900, 8, 98, 1, 99, 1, 99, 3, 99, 904, 8, 99, 1, 99, 5, 99, 907, 8, 99, 10, 99, 12, 99, 910, 9, 99, 1, 99, 1, 99, 3, 99, 914, 8, 99, 1, 99, 4, 99, 917, 8, 99, 11, 99, 12, 99, 918, 3, 99, 921, 8, 99, 1, 100, 1, 100, 4, 100, 925, 8, 100, 11, 100, 12, 100, 926, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 
108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 4, 118, 1002, 8, 118, 11, 118, 12, 118, 1003, 1, 118, 1, 118, 3, 118, 1008, 8, 118, 1, 118, 4, 118, 1011, 8, 118, 11, 118, 12, 118, 1012, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 4, 153, 1167, 8, 153, 11, 153, 12, 153, 1168, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 2, 507, 598, 0, 157, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 0, 170, 67, 172, 68, 174, 69, 176, 70, 178, 0, 180, 0, 182, 0, 184, 0, 186, 0, 188, 71, 190, 0, 192, 72, 194, 0, 196, 73, 198, 74, 200, 75, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 76, 214, 77, 216, 78, 218, 79, 220, 0, 222, 0, 224, 0, 226, 0, 228, 80, 230, 0, 232, 81, 234, 82, 236, 83, 238, 0, 240, 0, 242, 84, 244, 85, 246, 0, 248, 86, 250, 0, 252, 0, 254, 87, 256, 88, 258, 89, 260, 0, 262, 0, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 90, 276, 91, 278, 92, 280, 0, 282, 0, 284, 0, 286, 0, 288, 93, 290, 94, 292, 95, 294, 0, 296, 96, 298, 97, 300, 98, 302, 99, 304, 0, 306, 100, 308, 101, 310, 102, 312, 103, 314, 0, 316, 104, 318, 105, 320, 106, 322, 107, 324, 108, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 
44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1209, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 4, 202, 1, 0, 0, 0, 4, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 5, 220, 1, 0, 0, 0, 5, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 7, 260, 1, 0, 0, 0, 7, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 8, 280, 1, 0, 0, 0, 8, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 9, 294, 1, 0, 0, 0, 9, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 10, 304, 1, 0, 0, 0, 10, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 11, 314, 1, 0, 0, 0, 11, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 12, 326, 1, 0, 0, 0, 14, 336, 1, 0, 0, 0, 16, 343, 1, 0, 0, 0, 18, 352, 1, 0, 0, 0, 20, 359, 1, 0, 0, 0, 22, 369, 1, 0, 0, 0, 24, 376, 1, 0, 0, 0, 26, 383, 1, 0, 0, 0, 28, 397, 1, 0, 0, 0, 30, 404, 1, 0, 0, 0, 32, 412, 1, 0, 0, 0, 34, 419, 1, 0, 0, 0, 36, 431, 1, 0, 0, 0, 38, 440, 1, 0, 0, 0, 40, 446, 1, 0, 0, 0, 42, 453, 1, 0, 0, 0, 44, 460, 1, 0, 0, 0, 46, 468, 1, 0, 0, 0, 48, 477, 1, 0, 0, 0, 50, 483, 1, 0, 0, 0, 52, 500, 1, 0, 0, 0, 54, 516, 1, 0, 0, 0, 56, 522, 1, 0, 0, 0, 58, 527, 1, 0, 0, 0, 60, 532, 1, 0, 0, 0, 62, 536, 1, 0, 0, 0, 64, 540, 
1, 0, 0, 0, 66, 544, 1, 0, 0, 0, 68, 548, 1, 0, 0, 0, 70, 550, 1, 0, 0, 0, 72, 552, 1, 0, 0, 0, 74, 555, 1, 0, 0, 0, 76, 557, 1, 0, 0, 0, 78, 566, 1, 0, 0, 0, 80, 568, 1, 0, 0, 0, 82, 573, 1, 0, 0, 0, 84, 575, 1, 0, 0, 0, 86, 580, 1, 0, 0, 0, 88, 611, 1, 0, 0, 0, 90, 614, 1, 0, 0, 0, 92, 660, 1, 0, 0, 0, 94, 662, 1, 0, 0, 0, 96, 665, 1, 0, 0, 0, 98, 669, 1, 0, 0, 0, 100, 673, 1, 0, 0, 0, 102, 675, 1, 0, 0, 0, 104, 677, 1, 0, 0, 0, 106, 682, 1, 0, 0, 0, 108, 684, 1, 0, 0, 0, 110, 690, 1, 0, 0, 0, 112, 696, 1, 0, 0, 0, 114, 701, 1, 0, 0, 0, 116, 703, 1, 0, 0, 0, 118, 706, 1, 0, 0, 0, 120, 709, 1, 0, 0, 0, 122, 714, 1, 0, 0, 0, 124, 718, 1, 0, 0, 0, 126, 723, 1, 0, 0, 0, 128, 729, 1, 0, 0, 0, 130, 732, 1, 0, 0, 0, 132, 734, 1, 0, 0, 0, 134, 740, 1, 0, 0, 0, 136, 742, 1, 0, 0, 0, 138, 747, 1, 0, 0, 0, 140, 750, 1, 0, 0, 0, 142, 753, 1, 0, 0, 0, 144, 756, 1, 0, 0, 0, 146, 758, 1, 0, 0, 0, 148, 761, 1, 0, 0, 0, 150, 763, 1, 0, 0, 0, 152, 766, 1, 0, 0, 0, 154, 768, 1, 0, 0, 0, 156, 770, 1, 0, 0, 0, 158, 772, 1, 0, 0, 0, 160, 774, 1, 0, 0, 0, 162, 776, 1, 0, 0, 0, 164, 781, 1, 0, 0, 0, 166, 802, 1, 0, 0, 0, 168, 804, 1, 0, 0, 0, 170, 812, 1, 0, 0, 0, 172, 814, 1, 0, 0, 0, 174, 818, 1, 0, 0, 0, 176, 822, 1, 0, 0, 0, 178, 826, 1, 0, 0, 0, 180, 831, 1, 0, 0, 0, 182, 835, 1, 0, 0, 0, 184, 839, 1, 0, 0, 0, 186, 843, 1, 0, 0, 0, 188, 847, 1, 0, 0, 0, 190, 859, 1, 0, 0, 0, 192, 862, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 870, 1, 0, 0, 0, 198, 874, 1, 0, 0, 0, 200, 878, 1, 0, 0, 0, 202, 882, 1, 0, 0, 0, 204, 887, 1, 0, 0, 0, 206, 891, 1, 0, 0, 0, 208, 899, 1, 0, 0, 0, 210, 920, 1, 0, 0, 0, 212, 924, 1, 0, 0, 0, 214, 928, 1, 0, 0, 0, 216, 932, 1, 0, 0, 0, 218, 936, 1, 0, 0, 0, 220, 940, 1, 0, 0, 0, 222, 945, 1, 0, 0, 0, 224, 949, 1, 0, 0, 0, 226, 953, 1, 0, 0, 0, 228, 957, 1, 0, 0, 0, 230, 960, 1, 0, 0, 0, 232, 964, 1, 0, 0, 0, 234, 968, 1, 0, 0, 0, 236, 972, 1, 0, 0, 0, 238, 976, 1, 0, 0, 0, 240, 981, 1, 0, 0, 0, 242, 986, 1, 0, 0, 0, 244, 991, 1, 0, 0, 0, 246, 998, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1014, 1, 0, 0, 0, 252, 1018, 1, 0, 0, 0, 254, 1022, 1, 0, 0, 0, 256, 1026, 1, 0, 0, 0, 258, 1030, 1, 0, 0, 0, 260, 1034, 1, 0, 0, 0, 262, 1040, 1, 0, 0, 0, 264, 1044, 1, 0, 0, 0, 266, 1048, 1, 0, 0, 0, 268, 1052, 1, 0, 0, 0, 270, 1056, 1, 0, 0, 0, 272, 1060, 1, 0, 0, 0, 274, 1064, 1, 0, 0, 0, 276, 1068, 1, 0, 0, 0, 278, 1072, 1, 0, 0, 0, 280, 1076, 1, 0, 0, 0, 282, 1081, 1, 0, 0, 0, 284, 1085, 1, 0, 0, 0, 286, 1089, 1, 0, 0, 0, 288, 1093, 1, 0, 0, 0, 290, 1097, 1, 0, 0, 0, 292, 1101, 1, 0, 0, 0, 294, 1105, 1, 0, 0, 0, 296, 1110, 1, 0, 0, 0, 298, 1115, 1, 0, 0, 0, 300, 1119, 1, 0, 0, 0, 302, 1123, 1, 0, 0, 0, 304, 1127, 1, 0, 0, 0, 306, 1132, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1146, 1, 0, 0, 0, 312, 1150, 1, 0, 0, 0, 314, 1154, 1, 0, 0, 0, 316, 1159, 1, 0, 0, 0, 318, 1166, 1, 0, 0, 0, 320, 1170, 1, 0, 0, 0, 322, 1174, 1, 0, 0, 0, 324, 1178, 1, 0, 0, 0, 326, 327, 5, 100, 0, 0, 327, 328, 5, 105, 0, 0, 328, 329, 5, 115, 0, 0, 329, 330, 5, 115, 0, 0, 330, 331, 5, 101, 0, 0, 331, 332, 5, 99, 0, 0, 332, 333, 5, 116, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 6, 0, 0, 0, 335, 13, 1, 0, 0, 0, 336, 337, 5, 100, 0, 0, 337, 338, 5, 114, 0, 0, 338, 339, 5, 111, 0, 0, 339, 340, 5, 112, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 1, 1, 0, 342, 15, 1, 0, 0, 0, 343, 344, 5, 101, 0, 0, 344, 345, 5, 110, 0, 0, 345, 346, 5, 114, 0, 0, 346, 347, 5, 105, 0, 0, 347, 348, 5, 99, 0, 0, 348, 349, 5, 104, 0, 0, 349, 350, 1, 0, 0, 0, 350, 351, 6, 2, 2, 0, 351, 17, 1, 0, 0, 0, 352, 353, 5, 101, 0, 0, 353, 354, 5, 118, 0, 0, 354, 355, 
5, 97, 0, 0, 355, 356, 5, 108, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 6, 3, 0, 0, 358, 19, 1, 0, 0, 0, 359, 360, 5, 101, 0, 0, 360, 361, 5, 120, 0, 0, 361, 362, 5, 112, 0, 0, 362, 363, 5, 108, 0, 0, 363, 364, 5, 97, 0, 0, 364, 365, 5, 105, 0, 0, 365, 366, 5, 110, 0, 0, 366, 367, 1, 0, 0, 0, 367, 368, 6, 4, 3, 0, 368, 21, 1, 0, 0, 0, 369, 370, 5, 102, 0, 0, 370, 371, 5, 114, 0, 0, 371, 372, 5, 111, 0, 0, 372, 373, 5, 109, 0, 0, 373, 374, 1, 0, 0, 0, 374, 375, 6, 5, 4, 0, 375, 23, 1, 0, 0, 0, 376, 377, 5, 103, 0, 0, 377, 378, 5, 114, 0, 0, 378, 379, 5, 111, 0, 0, 379, 380, 5, 107, 0, 0, 380, 381, 1, 0, 0, 0, 381, 382, 6, 6, 0, 0, 382, 25, 1, 0, 0, 0, 383, 384, 5, 105, 0, 0, 384, 385, 5, 110, 0, 0, 385, 386, 5, 108, 0, 0, 386, 387, 5, 105, 0, 0, 387, 388, 5, 110, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 115, 0, 0, 390, 391, 5, 116, 0, 0, 391, 392, 5, 97, 0, 0, 392, 393, 5, 116, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 7, 0, 0, 396, 27, 1, 0, 0, 0, 397, 398, 5, 107, 0, 0, 398, 399, 5, 101, 0, 0, 399, 400, 5, 101, 0, 0, 400, 401, 5, 112, 0, 0, 401, 402, 1, 0, 0, 0, 402, 403, 6, 8, 1, 0, 403, 29, 1, 0, 0, 0, 404, 405, 5, 108, 0, 0, 405, 406, 5, 105, 0, 0, 406, 407, 5, 109, 0, 0, 407, 408, 5, 105, 0, 0, 408, 409, 5, 116, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 9, 0, 0, 411, 31, 1, 0, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 101, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 5, 97, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 10, 5, 0, 418, 33, 1, 0, 0, 0, 419, 420, 5, 109, 0, 0, 420, 421, 5, 118, 0, 0, 421, 422, 5, 95, 0, 0, 422, 423, 5, 101, 0, 0, 423, 424, 5, 120, 0, 0, 424, 425, 5, 112, 0, 0, 425, 426, 5, 97, 0, 0, 426, 427, 5, 110, 0, 0, 427, 428, 5, 100, 0, 0, 428, 429, 1, 0, 0, 0, 429, 430, 6, 11, 6, 0, 430, 35, 1, 0, 0, 0, 431, 432, 5, 114, 0, 0, 432, 433, 5, 101, 0, 0, 433, 434, 5, 110, 0, 0, 434, 435, 5, 97, 0, 0, 435, 436, 5, 109, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 12, 7, 0, 439, 37, 1, 0, 0, 0, 440, 441, 5, 114, 0, 0, 441, 442, 5, 111, 0, 0, 442, 443, 5, 119, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 13, 0, 0, 445, 39, 1, 0, 0, 0, 446, 447, 5, 115, 0, 0, 447, 448, 5, 104, 0, 0, 448, 449, 5, 111, 0, 0, 449, 450, 5, 119, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 6, 14, 8, 0, 452, 41, 1, 0, 0, 0, 453, 454, 5, 115, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 116, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 6, 15, 0, 0, 459, 43, 1, 0, 0, 0, 460, 461, 5, 115, 0, 0, 461, 462, 5, 116, 0, 0, 462, 463, 5, 97, 0, 0, 463, 464, 5, 116, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 16, 0, 0, 467, 45, 1, 0, 0, 0, 468, 469, 5, 119, 0, 0, 469, 470, 5, 104, 0, 0, 470, 471, 5, 101, 0, 0, 471, 472, 5, 114, 0, 0, 472, 473, 5, 101, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 6, 17, 0, 0, 475, 47, 1, 0, 0, 0, 476, 478, 8, 0, 0, 0, 477, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 18, 0, 0, 482, 49, 1, 0, 0, 0, 483, 484, 5, 47, 0, 0, 484, 485, 5, 47, 0, 0, 485, 489, 1, 0, 0, 0, 486, 488, 8, 1, 0, 0, 487, 486, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 494, 5, 13, 0, 0, 493, 492, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 497, 5, 10, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 6, 19, 9, 0, 499, 51, 1, 0, 0, 0, 500, 501, 5, 47, 0, 0, 501, 502, 5, 42, 0, 0, 502, 507, 1, 0, 0, 0, 503, 506, 3, 52, 20, 0, 504, 506, 9, 0, 0, 0, 505, 
503, 1, 0, 0, 0, 505, 504, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 510, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 5, 42, 0, 0, 511, 512, 5, 47, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 20, 9, 0, 514, 53, 1, 0, 0, 0, 515, 517, 7, 2, 0, 0, 516, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 21, 9, 0, 521, 55, 1, 0, 0, 0, 522, 523, 3, 162, 75, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 22, 10, 0, 525, 526, 6, 22, 11, 0, 526, 57, 1, 0, 0, 0, 527, 528, 3, 66, 27, 0, 528, 529, 1, 0, 0, 0, 529, 530, 6, 23, 12, 0, 530, 531, 6, 23, 13, 0, 531, 59, 1, 0, 0, 0, 532, 533, 3, 54, 21, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 24, 9, 0, 535, 61, 1, 0, 0, 0, 536, 537, 3, 50, 19, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 25, 9, 0, 539, 63, 1, 0, 0, 0, 540, 541, 3, 52, 20, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 26, 9, 0, 543, 65, 1, 0, 0, 0, 544, 545, 5, 124, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 27, 13, 0, 547, 67, 1, 0, 0, 0, 548, 549, 7, 3, 0, 0, 549, 69, 1, 0, 0, 0, 550, 551, 7, 4, 0, 0, 551, 71, 1, 0, 0, 0, 552, 553, 5, 92, 0, 0, 553, 554, 7, 5, 0, 0, 554, 73, 1, 0, 0, 0, 555, 556, 8, 6, 0, 0, 556, 75, 1, 0, 0, 0, 557, 559, 7, 7, 0, 0, 558, 560, 7, 8, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 562, 1, 0, 0, 0, 561, 563, 3, 68, 28, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 77, 1, 0, 0, 0, 566, 567, 5, 64, 0, 0, 567, 79, 1, 0, 0, 0, 568, 569, 5, 96, 0, 0, 569, 81, 1, 0, 0, 0, 570, 574, 8, 9, 0, 0, 571, 572, 5, 96, 0, 0, 572, 574, 5, 96, 0, 0, 573, 570, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 83, 1, 0, 0, 0, 575, 576, 5, 95, 0, 0, 576, 85, 1, 0, 0, 0, 577, 581, 3, 70, 29, 0, 578, 581, 3, 68, 28, 0, 579, 581, 3, 84, 36, 0, 580, 577, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 579, 1, 0, 0, 0, 581, 87, 1, 0, 0, 0, 582, 587, 5, 34, 0, 0, 583, 586, 3, 72, 30, 0, 584, 586, 3, 74, 31, 0, 585, 583, 1, 0, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 590, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 612, 5, 34, 0, 0, 591, 592, 5, 34, 0, 0, 592, 593, 5, 34, 0, 0, 593, 594, 5, 34, 0, 0, 594, 598, 1, 0, 0, 0, 595, 597, 8, 1, 0, 0, 596, 595, 1, 0, 0, 0, 597, 600, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 599, 601, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 601, 602, 5, 34, 0, 0, 602, 603, 5, 34, 0, 0, 603, 604, 5, 34, 0, 0, 604, 606, 1, 0, 0, 0, 605, 607, 5, 34, 0, 0, 606, 605, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 610, 5, 34, 0, 0, 609, 608, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 582, 1, 0, 0, 0, 611, 591, 1, 0, 0, 0, 612, 89, 1, 0, 0, 0, 613, 615, 3, 68, 28, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 91, 1, 0, 0, 0, 618, 620, 3, 68, 28, 0, 619, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 627, 3, 106, 47, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 629, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 661, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 632, 3, 106, 47, 0, 631, 633, 3, 68, 28, 0, 632, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 632, 1, 0, 0, 0, 634, 635, 1, 0, 0, 0, 635, 661, 1, 0, 0, 0, 636, 638, 3, 68, 28, 0, 637, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 648, 1, 0, 0, 0, 641, 645, 3, 106, 47, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 
645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 649, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 641, 1, 0, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 3, 76, 32, 0, 651, 661, 1, 0, 0, 0, 652, 654, 3, 106, 47, 0, 653, 655, 3, 68, 28, 0, 654, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 1, 0, 0, 0, 658, 659, 3, 76, 32, 0, 659, 661, 1, 0, 0, 0, 660, 619, 1, 0, 0, 0, 660, 630, 1, 0, 0, 0, 660, 637, 1, 0, 0, 0, 660, 652, 1, 0, 0, 0, 661, 93, 1, 0, 0, 0, 662, 663, 5, 98, 0, 0, 663, 664, 5, 121, 0, 0, 664, 95, 1, 0, 0, 0, 665, 666, 5, 97, 0, 0, 666, 667, 5, 110, 0, 0, 667, 668, 5, 100, 0, 0, 668, 97, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 115, 0, 0, 671, 672, 5, 99, 0, 0, 672, 99, 1, 0, 0, 0, 673, 674, 5, 61, 0, 0, 674, 101, 1, 0, 0, 0, 675, 676, 5, 44, 0, 0, 676, 103, 1, 0, 0, 0, 677, 678, 5, 100, 0, 0, 678, 679, 5, 101, 0, 0, 679, 680, 5, 115, 0, 0, 680, 681, 5, 99, 0, 0, 681, 105, 1, 0, 0, 0, 682, 683, 5, 46, 0, 0, 683, 107, 1, 0, 0, 0, 684, 685, 5, 102, 0, 0, 685, 686, 5, 97, 0, 0, 686, 687, 5, 108, 0, 0, 687, 688, 5, 115, 0, 0, 688, 689, 5, 101, 0, 0, 689, 109, 1, 0, 0, 0, 690, 691, 5, 102, 0, 0, 691, 692, 5, 105, 0, 0, 692, 693, 5, 114, 0, 0, 693, 694, 5, 115, 0, 0, 694, 695, 5, 116, 0, 0, 695, 111, 1, 0, 0, 0, 696, 697, 5, 108, 0, 0, 697, 698, 5, 97, 0, 0, 698, 699, 5, 115, 0, 0, 699, 700, 5, 116, 0, 0, 700, 113, 1, 0, 0, 0, 701, 702, 5, 40, 0, 0, 702, 115, 1, 0, 0, 0, 703, 704, 5, 105, 0, 0, 704, 705, 5, 110, 0, 0, 705, 117, 1, 0, 0, 0, 706, 707, 5, 105, 0, 0, 707, 708, 5, 115, 0, 0, 708, 119, 1, 0, 0, 0, 709, 710, 5, 108, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 107, 0, 0, 712, 713, 5, 101, 0, 0, 713, 121, 1, 0, 0, 0, 714, 715, 5, 110, 0, 0, 715, 716, 5, 111, 0, 0, 716, 717, 5, 116, 0, 0, 717, 123, 1, 0, 0, 0, 718, 719, 5, 110, 0, 0, 719, 720, 5, 117, 0, 0, 720, 721, 5, 108, 0, 0, 721, 722, 5, 108, 0, 0, 722, 125, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 117, 0, 0, 725, 726, 5, 108, 0, 0, 726, 727, 5, 108, 0, 0, 727, 728, 5, 115, 0, 0, 728, 127, 1, 0, 0, 0, 729, 730, 5, 111, 0, 0, 730, 731, 5, 114, 0, 0, 731, 129, 1, 0, 0, 0, 732, 733, 5, 63, 0, 0, 733, 131, 1, 0, 0, 0, 734, 735, 5, 114, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 105, 0, 0, 737, 738, 5, 107, 0, 0, 738, 739, 5, 101, 0, 0, 739, 133, 1, 0, 0, 0, 740, 741, 5, 41, 0, 0, 741, 135, 1, 0, 0, 0, 742, 743, 5, 116, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 117, 0, 0, 745, 746, 5, 101, 0, 0, 746, 137, 1, 0, 0, 0, 747, 748, 5, 61, 0, 0, 748, 749, 5, 61, 0, 0, 749, 139, 1, 0, 0, 0, 750, 751, 5, 61, 0, 0, 751, 752, 5, 126, 0, 0, 752, 141, 1, 0, 0, 0, 753, 754, 5, 33, 0, 0, 754, 755, 5, 61, 0, 0, 755, 143, 1, 0, 0, 0, 756, 757, 5, 60, 0, 0, 757, 145, 1, 0, 0, 0, 758, 759, 5, 60, 0, 0, 759, 760, 5, 61, 0, 0, 760, 147, 1, 0, 0, 0, 761, 762, 5, 62, 0, 0, 762, 149, 1, 0, 0, 0, 763, 764, 5, 62, 0, 0, 764, 765, 5, 61, 0, 0, 765, 151, 1, 0, 0, 0, 766, 767, 5, 43, 0, 0, 767, 153, 1, 0, 0, 0, 768, 769, 5, 45, 0, 0, 769, 155, 1, 0, 0, 0, 770, 771, 5, 42, 0, 0, 771, 157, 1, 0, 0, 0, 772, 773, 5, 47, 0, 0, 773, 159, 1, 0, 0, 0, 774, 775, 5, 37, 0, 0, 775, 161, 1, 0, 0, 0, 776, 777, 5, 91, 0, 0, 777, 778, 1, 0, 0, 0, 778, 779, 6, 75, 0, 0, 779, 780, 6, 75, 0, 0, 780, 163, 1, 0, 0, 0, 781, 782, 5, 93, 0, 0, 782, 783, 1, 0, 0, 0, 783, 784, 6, 76, 13, 0, 784, 785, 6, 76, 13, 0, 785, 165, 1, 0, 0, 0, 786, 790, 3, 70, 29, 0, 787, 789, 3, 86, 37, 0, 788, 787, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 803, 1, 0, 0, 0, 792, 790, 1, 0, 0, 
0, 793, 796, 3, 84, 36, 0, 794, 796, 3, 78, 33, 0, 795, 793, 1, 0, 0, 0, 795, 794, 1, 0, 0, 0, 796, 798, 1, 0, 0, 0, 797, 799, 3, 86, 37, 0, 798, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 798, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 803, 1, 0, 0, 0, 802, 786, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 803, 167, 1, 0, 0, 0, 804, 806, 3, 80, 34, 0, 805, 807, 3, 82, 35, 0, 806, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 811, 3, 80, 34, 0, 811, 169, 1, 0, 0, 0, 812, 813, 3, 168, 78, 0, 813, 171, 1, 0, 0, 0, 814, 815, 3, 50, 19, 0, 815, 816, 1, 0, 0, 0, 816, 817, 6, 80, 9, 0, 817, 173, 1, 0, 0, 0, 818, 819, 3, 52, 20, 0, 819, 820, 1, 0, 0, 0, 820, 821, 6, 81, 9, 0, 821, 175, 1, 0, 0, 0, 822, 823, 3, 54, 21, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 82, 9, 0, 825, 177, 1, 0, 0, 0, 826, 827, 3, 66, 27, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 83, 12, 0, 829, 830, 6, 83, 13, 0, 830, 179, 1, 0, 0, 0, 831, 832, 3, 162, 75, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 84, 10, 0, 834, 181, 1, 0, 0, 0, 835, 836, 3, 164, 76, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 85, 14, 0, 838, 183, 1, 0, 0, 0, 839, 840, 3, 102, 45, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 86, 15, 0, 842, 185, 1, 0, 0, 0, 843, 844, 3, 100, 44, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 87, 16, 0, 846, 187, 1, 0, 0, 0, 847, 848, 5, 109, 0, 0, 848, 849, 5, 101, 0, 0, 849, 850, 5, 116, 0, 0, 850, 851, 5, 97, 0, 0, 851, 852, 5, 100, 0, 0, 852, 853, 5, 97, 0, 0, 853, 854, 5, 116, 0, 0, 854, 855, 5, 97, 0, 0, 855, 189, 1, 0, 0, 0, 856, 860, 8, 10, 0, 0, 857, 858, 5, 47, 0, 0, 858, 860, 8, 11, 0, 0, 859, 856, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 860, 191, 1, 0, 0, 0, 861, 863, 3, 190, 89, 0, 862, 861, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 862, 1, 0, 0, 0, 864, 865, 1, 0, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 3, 170, 79, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 91, 17, 0, 869, 195, 1, 0, 0, 0, 870, 871, 3, 50, 19, 0, 871, 872, 1, 0, 0, 0, 872, 873, 6, 92, 9, 0, 873, 197, 1, 0, 0, 0, 874, 875, 3, 52, 20, 0, 875, 876, 1, 0, 0, 0, 876, 877, 6, 93, 9, 0, 877, 199, 1, 0, 0, 0, 878, 879, 3, 54, 21, 0, 879, 880, 1, 0, 0, 0, 880, 881, 6, 94, 9, 0, 881, 201, 1, 0, 0, 0, 882, 883, 3, 66, 27, 0, 883, 884, 1, 0, 0, 0, 884, 885, 6, 95, 12, 0, 885, 886, 6, 95, 13, 0, 886, 203, 1, 0, 0, 0, 887, 888, 3, 106, 47, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 96, 18, 0, 890, 205, 1, 0, 0, 0, 891, 892, 3, 102, 45, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 97, 15, 0, 894, 207, 1, 0, 0, 0, 895, 900, 3, 70, 29, 0, 896, 900, 3, 68, 28, 0, 897, 900, 3, 84, 36, 0, 898, 900, 3, 156, 72, 0, 899, 895, 1, 0, 0, 0, 899, 896, 1, 0, 0, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 209, 1, 0, 0, 0, 901, 904, 3, 70, 29, 0, 902, 904, 3, 156, 72, 0, 903, 901, 1, 0, 0, 0, 903, 902, 1, 0, 0, 0, 904, 908, 1, 0, 0, 0, 905, 907, 3, 208, 98, 0, 906, 905, 1, 0, 0, 0, 907, 910, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 921, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 911, 914, 3, 84, 36, 0, 912, 914, 3, 78, 33, 0, 913, 911, 1, 0, 0, 0, 913, 912, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 917, 3, 208, 98, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 921, 1, 0, 0, 0, 920, 903, 1, 0, 0, 0, 920, 913, 1, 0, 0, 0, 921, 211, 1, 0, 0, 0, 922, 925, 3, 210, 99, 0, 923, 925, 3, 168, 78, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 213, 1, 0, 0, 0, 928, 929, 3, 50, 19, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 101, 9, 0, 931, 215, 1, 0, 0, 0, 932, 
933, 3, 52, 20, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 102, 9, 0, 935, 217, 1, 0, 0, 0, 936, 937, 3, 54, 21, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 103, 9, 0, 939, 219, 1, 0, 0, 0, 940, 941, 3, 66, 27, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 104, 12, 0, 943, 944, 6, 104, 13, 0, 944, 221, 1, 0, 0, 0, 945, 946, 3, 100, 44, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 105, 16, 0, 948, 223, 1, 0, 0, 0, 949, 950, 3, 102, 45, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 106, 15, 0, 952, 225, 1, 0, 0, 0, 953, 954, 3, 106, 47, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 107, 18, 0, 956, 227, 1, 0, 0, 0, 957, 958, 5, 97, 0, 0, 958, 959, 5, 115, 0, 0, 959, 229, 1, 0, 0, 0, 960, 961, 3, 212, 100, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 109, 19, 0, 963, 231, 1, 0, 0, 0, 964, 965, 3, 50, 19, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 110, 9, 0, 967, 233, 1, 0, 0, 0, 968, 969, 3, 52, 20, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 111, 9, 0, 971, 235, 1, 0, 0, 0, 972, 973, 3, 54, 21, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 112, 9, 0, 975, 237, 1, 0, 0, 0, 976, 977, 3, 66, 27, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 113, 12, 0, 979, 980, 6, 113, 13, 0, 980, 239, 1, 0, 0, 0, 981, 982, 3, 162, 75, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 114, 10, 0, 984, 985, 6, 114, 20, 0, 985, 241, 1, 0, 0, 0, 986, 987, 5, 111, 0, 0, 987, 988, 5, 110, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 115, 21, 0, 990, 243, 1, 0, 0, 0, 991, 992, 5, 119, 0, 0, 992, 993, 5, 105, 0, 0, 993, 994, 5, 116, 0, 0, 994, 995, 5, 104, 0, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 116, 21, 0, 997, 245, 1, 0, 0, 0, 998, 999, 8, 12, 0, 0, 999, 247, 1, 0, 0, 0, 1000, 1002, 3, 246, 117, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 3, 316, 152, 0, 1006, 1008, 1, 0, 0, 0, 1007, 1001, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1010, 1, 0, 0, 0, 1009, 1011, 3, 246, 117, 0, 1010, 1009, 1, 0, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1010, 1, 0, 0, 0, 1012, 1013, 1, 0, 0, 0, 1013, 249, 1, 0, 0, 0, 1014, 1015, 3, 170, 79, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 119, 17, 0, 1017, 251, 1, 0, 0, 0, 1018, 1019, 3, 248, 118, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 120, 22, 0, 1021, 253, 1, 0, 0, 0, 1022, 1023, 3, 50, 19, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 121, 9, 0, 1025, 255, 1, 0, 0, 0, 1026, 1027, 3, 52, 20, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 122, 9, 0, 1029, 257, 1, 0, 0, 0, 1030, 1031, 3, 54, 21, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 123, 9, 0, 1033, 259, 1, 0, 0, 0, 1034, 1035, 3, 66, 27, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 124, 12, 0, 1037, 1038, 6, 124, 13, 0, 1038, 1039, 6, 124, 13, 0, 1039, 261, 1, 0, 0, 0, 1040, 1041, 3, 100, 44, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 125, 16, 0, 1043, 263, 1, 0, 0, 0, 1044, 1045, 3, 102, 45, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 126, 15, 0, 1047, 265, 1, 0, 0, 0, 1048, 1049, 3, 106, 47, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 127, 18, 0, 1051, 267, 1, 0, 0, 0, 1052, 1053, 3, 244, 116, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 128, 23, 0, 1055, 269, 1, 0, 0, 0, 1056, 1057, 3, 212, 100, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 129, 19, 0, 1059, 271, 1, 0, 0, 0, 1060, 1061, 3, 170, 79, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 130, 17, 0, 1063, 273, 1, 0, 0, 0, 1064, 1065, 3, 50, 19, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 131, 9, 0, 1067, 275, 1, 0, 0, 0, 1068, 1069, 3, 52, 20, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 132, 9, 0, 1071, 277, 1, 0, 0, 0, 1072, 1073, 3, 54, 21, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 133, 9, 0, 1075, 279, 1, 0, 
0, 0, 1076, 1077, 3, 66, 27, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 134, 12, 0, 1079, 1080, 6, 134, 13, 0, 1080, 281, 1, 0, 0, 0, 1081, 1082, 3, 106, 47, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 135, 18, 0, 1084, 283, 1, 0, 0, 0, 1085, 1086, 3, 170, 79, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 136, 17, 0, 1088, 285, 1, 0, 0, 0, 1089, 1090, 3, 166, 77, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 137, 24, 0, 1092, 287, 1, 0, 0, 0, 1093, 1094, 3, 50, 19, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 138, 9, 0, 1096, 289, 1, 0, 0, 0, 1097, 1098, 3, 52, 20, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 139, 9, 0, 1100, 291, 1, 0, 0, 0, 1101, 1102, 3, 54, 21, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 140, 9, 0, 1104, 293, 1, 0, 0, 0, 1105, 1106, 3, 66, 27, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 141, 12, 0, 1108, 1109, 6, 141, 13, 0, 1109, 295, 1, 0, 0, 0, 1110, 1111, 5, 105, 0, 0, 1111, 1112, 5, 110, 0, 0, 1112, 1113, 5, 102, 0, 0, 1113, 1114, 5, 111, 0, 0, 1114, 297, 1, 0, 0, 0, 1115, 1116, 3, 50, 19, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 143, 9, 0, 1118, 299, 1, 0, 0, 0, 1119, 1120, 3, 52, 20, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 144, 9, 0, 1122, 301, 1, 0, 0, 0, 1123, 1124, 3, 54, 21, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 145, 9, 0, 1126, 303, 1, 0, 0, 0, 1127, 1128, 3, 66, 27, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 146, 12, 0, 1130, 1131, 6, 146, 13, 0, 1131, 305, 1, 0, 0, 0, 1132, 1133, 5, 102, 0, 0, 1133, 1134, 5, 117, 0, 0, 1134, 1135, 5, 110, 0, 0, 1135, 1136, 5, 99, 0, 0, 1136, 1137, 5, 116, 0, 0, 1137, 1138, 5, 105, 0, 0, 1138, 1139, 5, 111, 0, 0, 1139, 1140, 5, 110, 0, 0, 1140, 1141, 5, 115, 0, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 50, 19, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 9, 0, 1145, 309, 1, 0, 0, 0, 1146, 1147, 3, 52, 20, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 149, 9, 0, 1149, 311, 1, 0, 0, 0, 1150, 1151, 3, 54, 21, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 150, 9, 0, 1153, 313, 1, 0, 0, 0, 1154, 1155, 3, 164, 76, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 151, 14, 0, 1157, 1158, 6, 151, 13, 0, 1158, 315, 1, 0, 0, 0, 1159, 1160, 5, 58, 0, 0, 1160, 317, 1, 0, 0, 0, 1161, 1167, 3, 78, 33, 0, 1162, 1167, 3, 68, 28, 0, 1163, 1167, 3, 106, 47, 0, 1164, 1167, 3, 70, 29, 0, 1165, 1167, 3, 84, 36, 0, 1166, 1161, 1, 0, 0, 0, 1166, 1162, 1, 0, 0, 0, 1166, 1163, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1165, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 319, 1, 0, 0, 0, 1170, 1171, 3, 50, 19, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 154, 9, 0, 1173, 321, 1, 0, 0, 0, 1174, 1175, 3, 52, 20, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 155, 9, 0, 1177, 323, 1, 0, 0, 0, 1178, 1179, 3, 54, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 156, 9, 0, 1181, 325, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 479, 489, 493, 496, 505, 507, 518, 559, 564, 573, 580, 585, 587, 598, 606, 609, 611, 616, 621, 627, 634, 639, 645, 648, 656, 660, 790, 795, 800, 802, 808, 859, 864, 899, 903, 908, 913, 918, 920, 924, 926, 1003, 1007, 1012, 1166, 1168, 25, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 67, 0, 7, 36, 0, 7, 76, 0, 5, 11, 0, 5, 7, 0, 7, 86, 0, 7, 85, 0, 7, 66, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 
02ba5f7caacde..3107ec6259dbc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -18,28 +18,30 @@ public class EsqlBaseLexer extends Lexer { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21, - EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24, - PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38, - LP=39, IN=40, IS=41, LIKE=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, - RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, - GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, - CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, - EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, ID_PATTERN=75, - PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, - AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, - ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, - ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, - MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, - SETTING_WS=104; + KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, + STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, + WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, + PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, + ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, + LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, + RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, + GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, + FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, + PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, + AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, + ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, + ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, + ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, + MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, + META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - 
ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10; + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, + SETTING_MODE=11; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -47,25 +49,25 @@ public class EsqlBaseLexer extends Lexer { public static String[] modeNames = { "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "SETTING_MODE" + "META_MODE", "SETTING_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", - "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", - "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", - "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", - "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", + "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", + "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", + "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", + "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", + "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", + "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", + "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", @@ -79,9 +81,10 @@ private static String[] makeRuleNames() { "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", 
"SETTING_WS" + "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", + "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } public static final String[] ruleNames = makeRuleNames(); @@ -89,25 +92,25 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'", - "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, - null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", - "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", - "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", - "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, - null, null, "'metadata'", null, null, null, null, null, null, null, null, - "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, - null, null, null, null, null, "'info'", "'functions'", null, null, null, - "':'" + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", + "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", - "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", + "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", @@ -121,8 +124,9 @@ private static String[] makeSymbolicNames() { "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", + "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", 
"SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } @@ -186,727 +190,749 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000h\u047b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000l\u049e\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ - "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ - "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ - "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ - "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ - "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ - "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ - "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ - "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ - "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ - "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ - "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ - "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ - "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ - "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ - "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ - "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ - "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ - "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ - "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ - "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ - "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ - "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ - "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ - "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ - "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ - "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ - "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ - "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ - "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ - "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ - "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ - "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ - "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ - "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ - "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ - "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ - "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - 
"\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0004\u0011\u01cc\b\u0011\u000b\u0011"+ - "\f\u0011\u01cd\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0005\u0012\u01d6\b\u0012\n\u0012\f\u0012\u01d9\t\u0012\u0001"+ - "\u0012\u0003\u0012\u01dc\b\u0012\u0001\u0012\u0003\u0012\u01df\b\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ + "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ + "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ + "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ + "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ + "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ + "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ + "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ + "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ + "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ + "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ + "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ + "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ + "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ + "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ + "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ + "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ + 
"P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ + "U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ + "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ + "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ + "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ + "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ + "n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ + "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ + "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ + "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ + "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ + "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ + "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ + "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ + "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ + "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ + "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ + "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ + "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ + "\u0002\u009c\u0007\u009c\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01de\b\u0012\u000b\u0012\f"+ + "\u0012\u01df\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ "\u0001\u0013\u0005\u0013\u01e8\b\u0013\n\u0013\f\u0013\u01eb\t\u0013\u0001"+ - 
"\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0004"+ - "\u0014\u01f3\b\u0014\u000b\u0014\f\u0014\u01f4\u0001\u0014\u0001\u0014"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ - "\u0001\u001f\u0003\u001f\u021e\b\u001f\u0001\u001f\u0004\u001f\u0221\b"+ - "\u001f\u000b\u001f\f\u001f\u0222\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001"+ - "\"\u0001\"\u0003\"\u022c\b\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003"+ - "$\u0233\b$\u0001%\u0001%\u0001%\u0005%\u0238\b%\n%\f%\u023b\t%\u0001%"+ - "\u0001%\u0001%\u0001%\u0001%\u0001%\u0005%\u0243\b%\n%\f%\u0246\t%\u0001"+ - "%\u0001%\u0001%\u0001%\u0001%\u0003%\u024d\b%\u0001%\u0003%\u0250\b%\u0003"+ - "%\u0252\b%\u0001&\u0004&\u0255\b&\u000b&\f&\u0256\u0001\'\u0004\'\u025a"+ - "\b\'\u000b\'\f\'\u025b\u0001\'\u0001\'\u0005\'\u0260\b\'\n\'\f\'\u0263"+ - "\t\'\u0001\'\u0001\'\u0004\'\u0267\b\'\u000b\'\f\'\u0268\u0001\'\u0004"+ - "\'\u026c\b\'\u000b\'\f\'\u026d\u0001\'\u0001\'\u0005\'\u0272\b\'\n\'\f"+ - "\'\u0275\t\'\u0003\'\u0277\b\'\u0001\'\u0001\'\u0001\'\u0001\'\u0004\'"+ - "\u027d\b\'\u000b\'\f\'\u027e\u0001\'\u0001\'\u0003\'\u0283\b\'\u0001("+ - "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ - "*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u0001"+ - "2\u00013\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00015\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00018\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001"+ - "C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001"+ - "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001"+ - "K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005L\u0303\bL\nL\fL\u0306\tL\u0001"+ - "L\u0001L\u0003L\u030a\bL\u0001L\u0004L\u030d\bL\u000bL\fL\u030e\u0003"+ - "L\u0311\bL\u0001M\u0001M\u0004M\u0315\bM\u000bM\fM\u0316\u0001M\u0001"+ - "M\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ - "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001"+ - "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001"+ - "U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001"+ - "W\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0003X\u034a\bX\u0001"+ - "Y\u0004Y\u034d\bY\u000bY\fY\u034e\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ - "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001"+ - "]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ - "`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0003a\u0372\ba\u0001"+ - "b\u0001b\u0003b\u0376\bb\u0001b\u0005b\u0379\bb\nb\fb\u037c\tb\u0001b"+ - "\u0001b\u0003b\u0380\bb\u0001b\u0004b\u0383\bb\u000bb\fb\u0384\u0003b"+ - 
"\u0387\bb\u0001c\u0001c\u0004c\u038b\bc\u000bc\fc\u038c\u0001d\u0001d"+ - "\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ - "f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ - "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001"+ - "n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001"+ - "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ - "t\u0001u\u0004u\u03d8\bu\u000bu\fu\u03d9\u0001u\u0001u\u0003u\u03de\b"+ - "u\u0001u\u0004u\u03e1\bu\u000bu\fu\u03e2\u0001v\u0001v\u0001v\u0001v\u0001"+ - "w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ - "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ - "~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ - "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ - "\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0013\u0003\u0013\u01ee\b\u0013\u0001\u0013\u0003\u0013\u01f1\b\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0005\u0014\u01fa\b\u0014\n\u0014\f\u0014\u01fd\t\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ + "\u0015\u0205\b\u0015\u000b\u0015\f\u0015\u0206\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ + " \u0003 \u0230\b \u0001 \u0004 \u0233\b \u000b \f \u0234\u0001!\u0001"+ + "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u023e\b#\u0001$\u0001$\u0001"+ + "%\u0001%\u0001%\u0003%\u0245\b%\u0001&\u0001&\u0001&\u0005&\u024a\b&\n"+ + "&\f&\u024d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u0255\b"+ + "&\n&\f&\u0258\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u025f\b&\u0001"+ + "&\u0003&\u0262\b&\u0003&\u0264\b&\u0001\'\u0004\'\u0267\b\'\u000b\'\f"+ + "\'\u0268\u0001(\u0004(\u026c\b(\u000b(\f(\u026d\u0001(\u0001(\u0005(\u0272"+ + "\b(\n(\f(\u0275\t(\u0001(\u0001(\u0004(\u0279\b(\u000b(\f(\u027a\u0001"+ + "(\u0004(\u027e\b(\u000b(\f(\u027f\u0001(\u0001(\u0005(\u0284\b(\n(\f("+ + "\u0287\t(\u0003(\u0289\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u028f\b("+ + "\u000b(\f(\u0290\u0001(\u0001(\u0003(\u0295\b(\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + 
"-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u0001"+ + "0\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u0001"+ + "1\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u0001"+ + "4\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u0001"+ + "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ + "9\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001<\u0001"+ + "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001"+ + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ + "A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001"+ + "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ + "J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001"+ + "L\u0001M\u0001M\u0005M\u0315\bM\nM\fM\u0318\tM\u0001M\u0001M\u0003M\u031c"+ + "\bM\u0001M\u0004M\u031f\bM\u000bM\fM\u0320\u0003M\u0323\bM\u0001N\u0001"+ + "N\u0004N\u0327\bN\u000bN\fN\u0328\u0001N\u0001N\u0001O\u0001O\u0001P\u0001"+ + "P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ + "R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001"+ + "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ + "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0003Y\u035c\bY\u0001Z\u0004Z\u035f\bZ\u000bZ\f"+ + "Z\u0360\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ + "]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0003b\u0384\bb\u0001c\u0001c\u0003c\u0388"+ + "\bc\u0001c\u0005c\u038b\bc\nc\fc\u038e\tc\u0001c\u0001c\u0003c\u0392\b"+ + "c\u0001c\u0004c\u0395\bc\u000bc\fc\u0396\u0003c\u0399\bc\u0001d\u0001"+ + "d\u0004d\u039d\bd\u000bd\fd\u039e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001"+ + "f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ + "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ + "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0004v\u03ea\bv\u000b"+ + "v\fv\u03eb\u0001v\u0001v\u0003v\u03f0\bv\u0001v\u0004v\u03f3\bv\u000b"+ + "v\fv\u03f4\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001"+ + "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ + "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ + "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088"+ + "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ + 
"\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f"+ - "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0094\u0004\u0094\u046c\b\u0094\u000b\u0094"+ - "\f\u0094\u046d\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097"+ - "\u0001\u0097\u0002\u01e9\u0244\u0000\u0098\u000b\u0001\r\u0002\u000f\u0003"+ - "\u0011\u0004\u0013\u0005\u0015\u0006\u0017\u0007\u0019\b\u001b\t\u001d"+ - "\n\u001f\u000b!\f#\r%\u000e\'\u000f)\u0010+\u0011-\u0012/\u00131\u0014"+ - "3\u00155\u00007\u00009\u0016;\u0017=\u0018?\u0019A\u0000C\u0000E\u0000"+ - "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0000U\u001aW\u001bY\u001c"+ - "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ - "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ - ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5\u0000\u00a7B\u00a9C"+ - "\u00abD\u00adE\u00af\u0000\u00b1\u0000\u00b3\u0000\u00b5\u0000\u00b7\u0000"+ - "\u00b9F\u00bb\u0000\u00bdG\u00bf\u0000\u00c1H\u00c3I\u00c5J\u00c7\u0000"+ - "\u00c9\u0000\u00cb\u0000\u00cd\u0000\u00cf\u0000\u00d1K\u00d3L\u00d5M"+ - "\u00d7N\u00d9\u0000\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1O\u00e3\u0000"+ - "\u00e5P\u00e7Q\u00e9R\u00eb\u0000\u00ed\u0000\u00efS\u00f1T\u00f3\u0000"+ - "\u00f5U\u00f7\u0000\u00f9\u0000\u00fbV\u00fdW\u00ffX\u0101\u0000\u0103"+ - "\u0000\u0105\u0000\u0107\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f"+ - "Y\u0111Z\u0113[\u0115\u0000\u0117\u0000\u0119\u0000\u011b\u0000\u011d"+ - "\\\u011f]\u0121^\u0123\u0000\u0125_\u0127`\u0129a\u012bb\u012dc\u012f"+ - "\u0000\u0131d\u0133e\u0135f\u0137g\u0139h\u000b\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000"+ - "\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ - "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ - "++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0497\u0000\u000b\u0001\u0000\u0000"+ - "\u0000\u0000\r\u0001\u0000\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00015\u0001\u0000\u0000\u0000\u00017\u0001"+ - "\u0000\u0000\u0000\u00019\u0001\u0000\u0000\u0000\u0001;\u0001\u0000\u0000"+ - "\u0000\u0001=\u0001\u0000\u0000\u0000\u0002?\u0001\u0000\u0000\u0000\u0002"+ - 
"U\u0001\u0000\u0000\u0000\u0002W\u0001\u0000\u0000\u0000\u0002Y\u0001"+ - "\u0000\u0000\u0000\u0002[\u0001\u0000\u0000\u0000\u0002]\u0001\u0000\u0000"+ - "\u0000\u0002_\u0001\u0000\u0000\u0000\u0002a\u0001\u0000\u0000\u0000\u0002"+ - "c\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000\u0000\u0002g\u0001"+ - "\u0000\u0000\u0000\u0002i\u0001\u0000\u0000\u0000\u0002k\u0001\u0000\u0000"+ - "\u0000\u0002m\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0002"+ - "q\u0001\u0000\u0000\u0000\u0002s\u0001\u0000\u0000\u0000\u0002u\u0001"+ - "\u0000\u0000\u0000\u0002w\u0001\u0000\u0000\u0000\u0002y\u0001\u0000\u0000"+ - "\u0000\u0002{\u0001\u0000\u0000\u0000\u0002}\u0001\u0000\u0000\u0000\u0002"+ - "\u007f\u0001\u0000\u0000\u0000\u0002\u0081\u0001\u0000\u0000\u0000\u0002"+ - "\u0083\u0001\u0000\u0000\u0000\u0002\u0085\u0001\u0000\u0000\u0000\u0002"+ - "\u0087\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000\u0000\u0000\u0002"+ - "\u008b\u0001\u0000\u0000\u0000\u0002\u008d\u0001\u0000\u0000\u0000\u0002"+ - "\u008f\u0001\u0000\u0000\u0000\u0002\u0091\u0001\u0000\u0000\u0000\u0002"+ - "\u0093\u0001\u0000\u0000\u0000\u0002\u0095\u0001\u0000\u0000\u0000\u0002"+ - "\u0097\u0001\u0000\u0000\u0000\u0002\u0099\u0001\u0000\u0000\u0000\u0002"+ - "\u009b\u0001\u0000\u0000\u0000\u0002\u009d\u0001\u0000\u0000\u0000\u0002"+ - "\u009f\u0001\u0000\u0000\u0000\u0002\u00a1\u0001\u0000\u0000\u0000\u0002"+ - "\u00a3\u0001\u0000\u0000\u0000\u0002\u00a7\u0001\u0000\u0000\u0000\u0002"+ - "\u00a9\u0001\u0000\u0000\u0000\u0002\u00ab\u0001\u0000\u0000\u0000\u0002"+ - "\u00ad\u0001\u0000\u0000\u0000\u0003\u00af\u0001\u0000\u0000\u0000\u0003"+ - "\u00b1\u0001\u0000\u0000\u0000\u0003\u00b3\u0001\u0000\u0000\u0000\u0003"+ - "\u00b5\u0001\u0000\u0000\u0000\u0003\u00b7\u0001\u0000\u0000\u0000\u0003"+ - "\u00b9\u0001\u0000\u0000\u0000\u0003\u00bd\u0001\u0000\u0000\u0000\u0003"+ - "\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003"+ - "\u00c3\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0004"+ - "\u00c7\u0001\u0000\u0000\u0000\u0004\u00c9\u0001\u0000\u0000\u0000\u0004"+ - "\u00cb\u0001\u0000\u0000\u0000\u0004\u00d1\u0001\u0000\u0000\u0000\u0004"+ - "\u00d3\u0001\u0000\u0000\u0000\u0004\u00d5\u0001\u0000\u0000\u0000\u0004"+ - "\u00d7\u0001\u0000\u0000\u0000\u0005\u00d9\u0001\u0000\u0000\u0000\u0005"+ - "\u00db\u0001\u0000\u0000\u0000\u0005\u00dd\u0001\u0000\u0000\u0000\u0005"+ - "\u00df\u0001\u0000\u0000\u0000\u0005\u00e1\u0001\u0000\u0000\u0000\u0005"+ - "\u00e3\u0001\u0000\u0000\u0000\u0005\u00e5\u0001\u0000\u0000\u0000\u0005"+ - "\u00e7\u0001\u0000\u0000\u0000\u0005\u00e9\u0001\u0000\u0000\u0000\u0006"+ - "\u00eb\u0001\u0000\u0000\u0000\u0006\u00ed\u0001\u0000\u0000\u0000\u0006"+ - "\u00ef\u0001\u0000\u0000\u0000\u0006\u00f1\u0001\u0000\u0000\u0000\u0006"+ - "\u00f5\u0001\u0000\u0000\u0000\u0006\u00f7\u0001\u0000\u0000\u0000\u0006"+ - "\u00f9\u0001\u0000\u0000\u0000\u0006\u00fb\u0001\u0000\u0000\u0000\u0006"+ - "\u00fd\u0001\u0000\u0000\u0000\u0006\u00ff\u0001\u0000\u0000\u0000\u0007"+ - "\u0101\u0001\u0000\u0000\u0000\u0007\u0103\u0001\u0000\u0000\u0000\u0007"+ - "\u0105\u0001\u0000\u0000\u0000\u0007\u0107\u0001\u0000\u0000\u0000\u0007"+ - "\u0109\u0001\u0000\u0000\u0000\u0007\u010b\u0001\u0000\u0000\u0000\u0007"+ - "\u010d\u0001\u0000\u0000\u0000\u0007\u010f\u0001\u0000\u0000\u0000\u0007"+ - "\u0111\u0001\u0000\u0000\u0000\u0007\u0113\u0001\u0000\u0000\u0000\b\u0115"+ - "\u0001\u0000\u0000\u0000\b\u0117\u0001\u0000\u0000\u0000\b\u0119\u0001"+ - 
"\u0000\u0000\u0000\b\u011b\u0001\u0000\u0000\u0000\b\u011d\u0001\u0000"+ - "\u0000\u0000\b\u011f\u0001\u0000\u0000\u0000\b\u0121\u0001\u0000\u0000"+ - "\u0000\t\u0123\u0001\u0000\u0000\u0000\t\u0125\u0001\u0000\u0000\u0000"+ - "\t\u0127\u0001\u0000\u0000\u0000\t\u0129\u0001\u0000\u0000\u0000\t\u012b"+ - "\u0001\u0000\u0000\u0000\t\u012d\u0001\u0000\u0000\u0000\n\u012f\u0001"+ - "\u0000\u0000\u0000\n\u0131\u0001\u0000\u0000\u0000\n\u0133\u0001\u0000"+ - "\u0000\u0000\n\u0135\u0001\u0000\u0000\u0000\n\u0137\u0001\u0000\u0000"+ - "\u0000\n\u0139\u0001\u0000\u0000\u0000\u000b\u013b\u0001\u0000\u0000\u0000"+ - "\r\u0145\u0001\u0000\u0000\u0000\u000f\u014c\u0001\u0000\u0000\u0000\u0011"+ - "\u0155\u0001\u0000\u0000\u0000\u0013\u015c\u0001\u0000\u0000\u0000\u0015"+ - "\u0166\u0001\u0000\u0000\u0000\u0017\u016d\u0001\u0000\u0000\u0000\u0019"+ - "\u0174\u0001\u0000\u0000\u0000\u001b\u0182\u0001\u0000\u0000\u0000\u001d"+ - "\u0189\u0001\u0000\u0000\u0000\u001f\u0191\u0001\u0000\u0000\u0000!\u019d"+ - "\u0001\u0000\u0000\u0000#\u01a6\u0001\u0000\u0000\u0000%\u01ac\u0001\u0000"+ - "\u0000\u0000\'\u01b3\u0001\u0000\u0000\u0000)\u01ba\u0001\u0000\u0000"+ - "\u0000+\u01c2\u0001\u0000\u0000\u0000-\u01cb\u0001\u0000\u0000\u0000/"+ - "\u01d1\u0001\u0000\u0000\u00001\u01e2\u0001\u0000\u0000\u00003\u01f2\u0001"+ - "\u0000\u0000\u00005\u01f8\u0001\u0000\u0000\u00007\u01fd\u0001\u0000\u0000"+ - "\u00009\u0202\u0001\u0000\u0000\u0000;\u0206\u0001\u0000\u0000\u0000="+ - "\u020a\u0001\u0000\u0000\u0000?\u020e\u0001\u0000\u0000\u0000A\u0212\u0001"+ - "\u0000\u0000\u0000C\u0214\u0001\u0000\u0000\u0000E\u0216\u0001\u0000\u0000"+ - "\u0000G\u0219\u0001\u0000\u0000\u0000I\u021b\u0001\u0000\u0000\u0000K"+ - "\u0224\u0001\u0000\u0000\u0000M\u0226\u0001\u0000\u0000\u0000O\u022b\u0001"+ - "\u0000\u0000\u0000Q\u022d\u0001\u0000\u0000\u0000S\u0232\u0001\u0000\u0000"+ - "\u0000U\u0251\u0001\u0000\u0000\u0000W\u0254\u0001\u0000\u0000\u0000Y"+ - "\u0282\u0001\u0000\u0000\u0000[\u0284\u0001\u0000\u0000\u0000]\u0287\u0001"+ - "\u0000\u0000\u0000_\u028b\u0001\u0000\u0000\u0000a\u028f\u0001\u0000\u0000"+ - "\u0000c\u0291\u0001\u0000\u0000\u0000e\u0293\u0001\u0000\u0000\u0000g"+ - "\u0298\u0001\u0000\u0000\u0000i\u029a\u0001\u0000\u0000\u0000k\u02a0\u0001"+ - "\u0000\u0000\u0000m\u02a6\u0001\u0000\u0000\u0000o\u02ab\u0001\u0000\u0000"+ - "\u0000q\u02ad\u0001\u0000\u0000\u0000s\u02b0\u0001\u0000\u0000\u0000u"+ - "\u02b3\u0001\u0000\u0000\u0000w\u02b8\u0001\u0000\u0000\u0000y\u02bc\u0001"+ - "\u0000\u0000\u0000{\u02c1\u0001\u0000\u0000\u0000}\u02c7\u0001\u0000\u0000"+ - "\u0000\u007f\u02ca\u0001\u0000\u0000\u0000\u0081\u02cc\u0001\u0000\u0000"+ - "\u0000\u0083\u02d2\u0001\u0000\u0000\u0000\u0085\u02d4\u0001\u0000\u0000"+ - "\u0000\u0087\u02d9\u0001\u0000\u0000\u0000\u0089\u02dc\u0001\u0000\u0000"+ - "\u0000\u008b\u02df\u0001\u0000\u0000\u0000\u008d\u02e2\u0001\u0000\u0000"+ - "\u0000\u008f\u02e4\u0001\u0000\u0000\u0000\u0091\u02e7\u0001\u0000\u0000"+ - "\u0000\u0093\u02e9\u0001\u0000\u0000\u0000\u0095\u02ec\u0001\u0000\u0000"+ - "\u0000\u0097\u02ee\u0001\u0000\u0000\u0000\u0099\u02f0\u0001\u0000\u0000"+ - "\u0000\u009b\u02f2\u0001\u0000\u0000\u0000\u009d\u02f4\u0001\u0000\u0000"+ - "\u0000\u009f\u02f6\u0001\u0000\u0000\u0000\u00a1\u02fb\u0001\u0000\u0000"+ - "\u0000\u00a3\u0310\u0001\u0000\u0000\u0000\u00a5\u0312\u0001\u0000\u0000"+ - "\u0000\u00a7\u031a\u0001\u0000\u0000\u0000\u00a9\u031c\u0001\u0000\u0000"+ - "\u0000\u00ab\u0320\u0001\u0000\u0000\u0000\u00ad\u0324\u0001\u0000\u0000"+ - 
"\u0000\u00af\u0328\u0001\u0000\u0000\u0000\u00b1\u032d\u0001\u0000\u0000"+ - "\u0000\u00b3\u0331\u0001\u0000\u0000\u0000\u00b5\u0335\u0001\u0000\u0000"+ - "\u0000\u00b7\u0339\u0001\u0000\u0000\u0000\u00b9\u033d\u0001\u0000\u0000"+ - "\u0000\u00bb\u0349\u0001\u0000\u0000\u0000\u00bd\u034c\u0001\u0000\u0000"+ - "\u0000\u00bf\u0350\u0001\u0000\u0000\u0000\u00c1\u0354\u0001\u0000\u0000"+ - "\u0000\u00c3\u0358\u0001\u0000\u0000\u0000\u00c5\u035c\u0001\u0000\u0000"+ - "\u0000\u00c7\u0360\u0001\u0000\u0000\u0000\u00c9\u0365\u0001\u0000\u0000"+ - "\u0000\u00cb\u0369\u0001\u0000\u0000\u0000\u00cd\u0371\u0001\u0000\u0000"+ - "\u0000\u00cf\u0386\u0001\u0000\u0000\u0000\u00d1\u038a\u0001\u0000\u0000"+ - "\u0000\u00d3\u038e\u0001\u0000\u0000\u0000\u00d5\u0392\u0001\u0000\u0000"+ - "\u0000\u00d7\u0396\u0001\u0000\u0000\u0000\u00d9\u039a\u0001\u0000\u0000"+ - "\u0000\u00db\u039f\u0001\u0000\u0000\u0000\u00dd\u03a3\u0001\u0000\u0000"+ - "\u0000\u00df\u03a7\u0001\u0000\u0000\u0000\u00e1\u03ab\u0001\u0000\u0000"+ - "\u0000\u00e3\u03ae\u0001\u0000\u0000\u0000\u00e5\u03b2\u0001\u0000\u0000"+ - "\u0000\u00e7\u03b6\u0001\u0000\u0000\u0000\u00e9\u03ba\u0001\u0000\u0000"+ - "\u0000\u00eb\u03be\u0001\u0000\u0000\u0000\u00ed\u03c3\u0001\u0000\u0000"+ - "\u0000\u00ef\u03c8\u0001\u0000\u0000\u0000\u00f1\u03cd\u0001\u0000\u0000"+ - "\u0000\u00f3\u03d4\u0001\u0000\u0000\u0000\u00f5\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f7\u03e4\u0001\u0000\u0000\u0000\u00f9\u03e8\u0001\u0000\u0000"+ - "\u0000\u00fb\u03ec\u0001\u0000\u0000\u0000\u00fd\u03f0\u0001\u0000\u0000"+ - "\u0000\u00ff\u03f4\u0001\u0000\u0000\u0000\u0101\u03f8\u0001\u0000\u0000"+ - "\u0000\u0103\u03fe\u0001\u0000\u0000\u0000\u0105\u0402\u0001\u0000\u0000"+ - "\u0000\u0107\u0406\u0001\u0000\u0000\u0000\u0109\u040a\u0001\u0000\u0000"+ - "\u0000\u010b\u040e\u0001\u0000\u0000\u0000\u010d\u0412\u0001\u0000\u0000"+ - "\u0000\u010f\u0416\u0001\u0000\u0000\u0000\u0111\u041a\u0001\u0000\u0000"+ - "\u0000\u0113\u041e\u0001\u0000\u0000\u0000\u0115\u0422\u0001\u0000\u0000"+ - "\u0000\u0117\u0427\u0001\u0000\u0000\u0000\u0119\u042b\u0001\u0000\u0000"+ - "\u0000\u011b\u042f\u0001\u0000\u0000\u0000\u011d\u0433\u0001\u0000\u0000"+ - "\u0000\u011f\u0437\u0001\u0000\u0000\u0000\u0121\u043b\u0001\u0000\u0000"+ - "\u0000\u0123\u043f\u0001\u0000\u0000\u0000\u0125\u0444\u0001\u0000\u0000"+ - "\u0000\u0127\u0449\u0001\u0000\u0000\u0000\u0129\u0453\u0001\u0000\u0000"+ - "\u0000\u012b\u0457\u0001\u0000\u0000\u0000\u012d\u045b\u0001\u0000\u0000"+ - "\u0000\u012f\u045f\u0001\u0000\u0000\u0000\u0131\u0464\u0001\u0000\u0000"+ - "\u0000\u0133\u046b\u0001\u0000\u0000\u0000\u0135\u046f\u0001\u0000\u0000"+ - "\u0000\u0137\u0473\u0001\u0000\u0000\u0000\u0139\u0477\u0001\u0000\u0000"+ - "\u0000\u013b\u013c\u0005d\u0000\u0000\u013c\u013d\u0005i\u0000\u0000\u013d"+ - "\u013e\u0005s\u0000\u0000\u013e\u013f\u0005s\u0000\u0000\u013f\u0140\u0005"+ - "e\u0000\u0000\u0140\u0141\u0005c\u0000\u0000\u0141\u0142\u0005t\u0000"+ - "\u0000\u0142\u0143\u0001\u0000\u0000\u0000\u0143\u0144\u0006\u0000\u0000"+ - "\u0000\u0144\f\u0001\u0000\u0000\u0000\u0145\u0146\u0005d\u0000\u0000"+ - "\u0146\u0147\u0005r\u0000\u0000\u0147\u0148\u0005o\u0000\u0000\u0148\u0149"+ - "\u0005p\u0000\u0000\u0149\u014a\u0001\u0000\u0000\u0000\u014a\u014b\u0006"+ - "\u0001\u0001\u0000\u014b\u000e\u0001\u0000\u0000\u0000\u014c\u014d\u0005"+ - "e\u0000\u0000\u014d\u014e\u0005n\u0000\u0000\u014e\u014f\u0005r\u0000"+ - "\u0000\u014f\u0150\u0005i\u0000\u0000\u0150\u0151\u0005c\u0000\u0000\u0151"+ - 
"\u0152\u0005h\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0154"+ - "\u0006\u0002\u0002\u0000\u0154\u0010\u0001\u0000\u0000\u0000\u0155\u0156"+ - "\u0005e\u0000\u0000\u0156\u0157\u0005v\u0000\u0000\u0157\u0158\u0005a"+ - "\u0000\u0000\u0158\u0159\u0005l\u0000\u0000\u0159\u015a\u0001\u0000\u0000"+ - "\u0000\u015a\u015b\u0006\u0003\u0000\u0000\u015b\u0012\u0001\u0000\u0000"+ - "\u0000\u015c\u015d\u0005e\u0000\u0000\u015d\u015e\u0005x\u0000\u0000\u015e"+ - "\u015f\u0005p\u0000\u0000\u015f\u0160\u0005l\u0000\u0000\u0160\u0161\u0005"+ - "a\u0000\u0000\u0161\u0162\u0005i\u0000\u0000\u0162\u0163\u0005n\u0000"+ - "\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164\u0165\u0006\u0004\u0003"+ - "\u0000\u0165\u0014\u0001\u0000\u0000\u0000\u0166\u0167\u0005f\u0000\u0000"+ - "\u0167\u0168\u0005r\u0000\u0000\u0168\u0169\u0005o\u0000\u0000\u0169\u016a"+ - "\u0005m\u0000\u0000\u016a\u016b\u0001\u0000\u0000\u0000\u016b\u016c\u0006"+ - "\u0005\u0004\u0000\u016c\u0016\u0001\u0000\u0000\u0000\u016d\u016e\u0005"+ - "g\u0000\u0000\u016e\u016f\u0005r\u0000\u0000\u016f\u0170\u0005o\u0000"+ - "\u0000\u0170\u0171\u0005k\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000"+ - "\u0172\u0173\u0006\u0006\u0000\u0000\u0173\u0018\u0001\u0000\u0000\u0000"+ - "\u0174\u0175\u0005i\u0000\u0000\u0175\u0176\u0005n\u0000\u0000\u0176\u0177"+ - "\u0005l\u0000\u0000\u0177\u0178\u0005i\u0000\u0000\u0178\u0179\u0005n"+ - "\u0000\u0000\u0179\u017a\u0005e\u0000\u0000\u017a\u017b\u0005s\u0000\u0000"+ - "\u017b\u017c\u0005t\u0000\u0000\u017c\u017d\u0005a\u0000\u0000\u017d\u017e"+ - "\u0005t\u0000\u0000\u017e\u017f\u0005s\u0000\u0000\u017f\u0180\u0001\u0000"+ - "\u0000\u0000\u0180\u0181\u0006\u0007\u0000\u0000\u0181\u001a\u0001\u0000"+ - "\u0000\u0000\u0182\u0183\u0005k\u0000\u0000\u0183\u0184\u0005e\u0000\u0000"+ - "\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005p\u0000\u0000\u0186\u0187"+ - "\u0001\u0000\u0000\u0000\u0187\u0188\u0006\b\u0001\u0000\u0188\u001c\u0001"+ - "\u0000\u0000\u0000\u0189\u018a\u0005l\u0000\u0000\u018a\u018b\u0005i\u0000"+ - "\u0000\u018b\u018c\u0005m\u0000\u0000\u018c\u018d\u0005i\u0000\u0000\u018d"+ - "\u018e\u0005t\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f\u0190"+ - "\u0006\t\u0000\u0000\u0190\u001e\u0001\u0000\u0000\u0000\u0191\u0192\u0005"+ - "m\u0000\u0000\u0192\u0193\u0005v\u0000\u0000\u0193\u0194\u0005_\u0000"+ - "\u0000\u0194\u0195\u0005e\u0000\u0000\u0195\u0196\u0005x\u0000\u0000\u0196"+ - "\u0197\u0005p\u0000\u0000\u0197\u0198\u0005a\u0000\u0000\u0198\u0199\u0005"+ - "n\u0000\u0000\u0199\u019a\u0005d\u0000\u0000\u019a\u019b\u0001\u0000\u0000"+ - "\u0000\u019b\u019c\u0006\n\u0005\u0000\u019c \u0001\u0000\u0000\u0000"+ - "\u019d\u019e\u0005r\u0000\u0000\u019e\u019f\u0005e\u0000\u0000\u019f\u01a0"+ - "\u0005n\u0000\u0000\u01a0\u01a1\u0005a\u0000\u0000\u01a1\u01a2\u0005m"+ - "\u0000\u0000\u01a2\u01a3\u0005e\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a4\u01a5\u0006\u000b\u0006\u0000\u01a5\"\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a7\u0005r\u0000\u0000\u01a7\u01a8\u0005o\u0000\u0000\u01a8\u01a9"+ - "\u0005w\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000\u01aa\u01ab\u0006"+ - "\f\u0000\u0000\u01ab$\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005s\u0000"+ - "\u0000\u01ad\u01ae\u0005h\u0000\u0000\u01ae\u01af\u0005o\u0000\u0000\u01af"+ - "\u01b0\u0005w\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000\u0000\u01b1\u01b2"+ - "\u0006\r\u0007\u0000\u01b2&\u0001\u0000\u0000\u0000\u01b3\u01b4\u0005"+ - "s\u0000\u0000\u01b4\u01b5\u0005o\u0000\u0000\u01b5\u01b6\u0005r\u0000"+ - 
"\u0000\u01b6\u01b7\u0005t\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000"+ - "\u01b8\u01b9\u0006\u000e\u0000\u0000\u01b9(\u0001\u0000\u0000\u0000\u01ba"+ - "\u01bb\u0005s\u0000\u0000\u01bb\u01bc\u0005t\u0000\u0000\u01bc\u01bd\u0005"+ - "a\u0000\u0000\u01bd\u01be\u0005t\u0000\u0000\u01be\u01bf\u0005s\u0000"+ - "\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006\u000f\u0000"+ - "\u0000\u01c1*\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005w\u0000\u0000\u01c3"+ - "\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005e\u0000\u0000\u01c5\u01c6\u0005"+ - "r\u0000\u0000\u01c6\u01c7\u0005e\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000"+ - "\u0000\u01c8\u01c9\u0006\u0010\u0000\u0000\u01c9,\u0001\u0000\u0000\u0000"+ - "\u01ca\u01cc\b\u0000\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0001\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0006\u0011\u0000\u0000\u01d0.\u0001\u0000\u0000\u0000\u01d1\u01d2"+ - "\u0005/\u0000\u0000\u01d2\u01d3\u0005/\u0000\u0000\u01d3\u01d7\u0001\u0000"+ - "\u0000\u0000\u01d4\u01d6\b\u0001\u0000\u0000\u01d5\u01d4\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d9\u0001\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000"+ - "\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01db\u0001\u0000\u0000"+ - "\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01da\u01dc\u0005\r\u0000\u0000"+ - "\u01db\u01da\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000"+ - "\u01dc\u01de\u0001\u0000\u0000\u0000\u01dd\u01df\u0005\n\u0000\u0000\u01de"+ - "\u01dd\u0001\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01df"+ - "\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0006\u0012\b\u0000\u01e10"+ - "\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005/\u0000\u0000\u01e3\u01e4\u0005"+ - "*\u0000\u0000\u01e4\u01e9\u0001\u0000\u0000\u0000\u01e5\u01e8\u00031\u0013"+ - "\u0000\u01e6\u01e8\t\u0000\u0000\u0000\u01e7\u01e5\u0001\u0000\u0000\u0000"+ - "\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000\u0000\u0000"+ - "\u01ea\u01ec\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01ec\u01ed\u0005*\u0000\u0000\u01ed\u01ee\u0005/\u0000\u0000\u01ee\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\u0013\b\u0000\u01f02\u0001"+ - "\u0000\u0000\u0000\u01f1\u01f3\u0007\u0002\u0000\u0000\u01f2\u01f1\u0001"+ - "\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f2\u0001"+ - "\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f6\u01f7\u0006\u0014\b\u0000\u01f74\u0001\u0000"+ - "\u0000\u0000\u01f8\u01f9\u0003\u009fJ\u0000\u01f9\u01fa\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0006\u0015\t\u0000\u01fb\u01fc\u0006\u0015\n\u0000"+ - "\u01fc6\u0001\u0000\u0000\u0000\u01fd\u01fe\u0003?\u001a\u0000\u01fe\u01ff"+ - "\u0001\u0000\u0000\u0000\u01ff\u0200\u0006\u0016\u000b\u0000\u0200\u0201"+ - "\u0006\u0016\f\u0000\u02018\u0001\u0000\u0000\u0000\u0202\u0203\u0003"+ - "3\u0014\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205\u0006\u0017"+ - "\b\u0000\u0205:\u0001\u0000\u0000\u0000\u0206\u0207\u0003/\u0012\u0000"+ - "\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006\u0018\b\u0000\u0209"+ - "<\u0001\u0000\u0000\u0000\u020a\u020b\u00031\u0013\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\u0019\b\u0000\u020d>\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0005|\u0000\u0000\u020f\u0210\u0001\u0000\u0000"+ - 
"\u0000\u0210\u0211\u0006\u001a\f\u0000\u0211@\u0001\u0000\u0000\u0000"+ - "\u0212\u0213\u0007\u0003\u0000\u0000\u0213B\u0001\u0000\u0000\u0000\u0214"+ - "\u0215\u0007\u0004\u0000\u0000\u0215D\u0001\u0000\u0000\u0000\u0216\u0217"+ - "\u0005\\\u0000\u0000\u0217\u0218\u0007\u0005\u0000\u0000\u0218F\u0001"+ - "\u0000\u0000\u0000\u0219\u021a\b\u0006\u0000\u0000\u021aH\u0001\u0000"+ - "\u0000\u0000\u021b\u021d\u0007\u0007\u0000\u0000\u021c\u021e\u0007\b\u0000"+ - "\u0000\u021d\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000"+ - "\u0000\u021e\u0220\u0001\u0000\u0000\u0000\u021f\u0221\u0003A\u001b\u0000"+ - "\u0220\u021f\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000"+ - "\u0222\u0220\u0001\u0000\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000"+ - "\u0223J\u0001\u0000\u0000\u0000\u0224\u0225\u0005@\u0000\u0000\u0225L"+ - "\u0001\u0000\u0000\u0000\u0226\u0227\u0005`\u0000\u0000\u0227N\u0001\u0000"+ - "\u0000\u0000\u0228\u022c\b\t\u0000\u0000\u0229\u022a\u0005`\u0000\u0000"+ - "\u022a\u022c\u0005`\u0000\u0000\u022b\u0228\u0001\u0000\u0000\u0000\u022b"+ - "\u0229\u0001\u0000\u0000\u0000\u022cP\u0001\u0000\u0000\u0000\u022d\u022e"+ - "\u0005_\u0000\u0000\u022eR\u0001\u0000\u0000\u0000\u022f\u0233\u0003C"+ - "\u001c\u0000\u0230\u0233\u0003A\u001b\u0000\u0231\u0233\u0003Q#\u0000"+ - "\u0232\u022f\u0001\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000"+ - "\u0232\u0231\u0001\u0000\u0000\u0000\u0233T\u0001\u0000\u0000\u0000\u0234"+ - "\u0239\u0005\"\u0000\u0000\u0235\u0238\u0003E\u001d\u0000\u0236\u0238"+ - "\u0003G\u001e\u0000\u0237\u0235\u0001\u0000\u0000\u0000\u0237\u0236\u0001"+ - "\u0000\u0000\u0000\u0238\u023b\u0001\u0000\u0000\u0000\u0239\u0237\u0001"+ - "\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023c\u0001"+ - "\u0000\u0000\u0000\u023b\u0239\u0001\u0000\u0000\u0000\u023c\u0252\u0005"+ - "\"\u0000\u0000\u023d\u023e\u0005\"\u0000\u0000\u023e\u023f\u0005\"\u0000"+ - "\u0000\u023f\u0240\u0005\"\u0000\u0000\u0240\u0244\u0001\u0000\u0000\u0000"+ - "\u0241\u0243\b\u0001\u0000\u0000\u0242\u0241\u0001\u0000\u0000\u0000\u0243"+ - "\u0246\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0244"+ - "\u0242\u0001\u0000\u0000\u0000\u0245\u0247\u0001\u0000\u0000\u0000\u0246"+ - "\u0244\u0001\u0000\u0000\u0000\u0247\u0248\u0005\"\u0000\u0000\u0248\u0249"+ - "\u0005\"\u0000\u0000\u0249\u024a\u0005\"\u0000\u0000\u024a\u024c\u0001"+ - "\u0000\u0000\u0000\u024b\u024d\u0005\"\u0000\u0000\u024c\u024b\u0001\u0000"+ - "\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000\u024d\u024f\u0001\u0000"+ - "\u0000\u0000\u024e\u0250\u0005\"\u0000\u0000\u024f\u024e\u0001\u0000\u0000"+ - "\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000\u0000"+ - "\u0000\u0251\u0234\u0001\u0000\u0000\u0000\u0251\u023d\u0001\u0000\u0000"+ - "\u0000\u0252V\u0001\u0000\u0000\u0000\u0253\u0255\u0003A\u001b\u0000\u0254"+ - "\u0253\u0001\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256"+ - "\u0254\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0257"+ - "X\u0001\u0000\u0000\u0000\u0258\u025a\u0003A\u001b\u0000\u0259\u0258\u0001"+ - "\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025b\u0259\u0001"+ - "\u0000\u0000\u0000\u025b\u025c\u0001\u0000\u0000\u0000\u025c\u025d\u0001"+ - "\u0000\u0000\u0000\u025d\u0261\u0003g.\u0000\u025e\u0260\u0003A\u001b"+ - "\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000\u0000"+ - "\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261\u0262\u0001\u0000\u0000"+ - 
"\u0000\u0262\u0283\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000\u0000"+ - "\u0000\u0264\u0266\u0003g.\u0000\u0265\u0267\u0003A\u001b\u0000\u0266"+ - "\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268"+ - "\u0266\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269"+ - "\u0283\u0001\u0000\u0000\u0000\u026a\u026c\u0003A\u001b\u0000\u026b\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026b"+ - "\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u0276"+ - "\u0001\u0000\u0000\u0000\u026f\u0273\u0003g.\u0000\u0270\u0272\u0003A"+ - "\u001b\u0000\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001\u0000"+ - "\u0000\u0000\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000"+ - "\u0000\u0000\u0276\u026f\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000"+ - "\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000\u0278\u0279\u0003I\u001f"+ - "\u0000\u0279\u0283\u0001\u0000\u0000\u0000\u027a\u027c\u0003g.\u0000\u027b"+ - "\u027d\u0003A\u001b\u0000\u027c\u027b\u0001\u0000\u0000\u0000\u027d\u027e"+ - "\u0001\u0000\u0000\u0000\u027e\u027c\u0001\u0000\u0000\u0000\u027e\u027f"+ - "\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0281"+ - "\u0003I\u001f\u0000\u0281\u0283\u0001\u0000\u0000\u0000\u0282\u0259\u0001"+ - "\u0000\u0000\u0000\u0282\u0264\u0001\u0000\u0000\u0000\u0282\u026b\u0001"+ - "\u0000\u0000\u0000\u0282\u027a\u0001\u0000\u0000\u0000\u0283Z\u0001\u0000"+ - "\u0000\u0000\u0284\u0285\u0005b\u0000\u0000\u0285\u0286\u0005y\u0000\u0000"+ - "\u0286\\\u0001\u0000\u0000\u0000\u0287\u0288\u0005a\u0000\u0000\u0288"+ - "\u0289\u0005n\u0000\u0000\u0289\u028a\u0005d\u0000\u0000\u028a^\u0001"+ - "\u0000\u0000\u0000\u028b\u028c\u0005a\u0000\u0000\u028c\u028d\u0005s\u0000"+ - "\u0000\u028d\u028e\u0005c\u0000\u0000\u028e`\u0001\u0000\u0000\u0000\u028f"+ - "\u0290\u0005=\u0000\u0000\u0290b\u0001\u0000\u0000\u0000\u0291\u0292\u0005"+ - ",\u0000\u0000\u0292d\u0001\u0000\u0000\u0000\u0293\u0294\u0005d\u0000"+ - "\u0000\u0294\u0295\u0005e\u0000\u0000\u0295\u0296\u0005s\u0000\u0000\u0296"+ - "\u0297\u0005c\u0000\u0000\u0297f\u0001\u0000\u0000\u0000\u0298\u0299\u0005"+ - ".\u0000\u0000\u0299h\u0001\u0000\u0000\u0000\u029a\u029b\u0005f\u0000"+ - "\u0000\u029b\u029c\u0005a\u0000\u0000\u029c\u029d\u0005l\u0000\u0000\u029d"+ - "\u029e\u0005s\u0000\u0000\u029e\u029f\u0005e\u0000\u0000\u029fj\u0001"+ - "\u0000\u0000\u0000\u02a0\u02a1\u0005f\u0000\u0000\u02a1\u02a2\u0005i\u0000"+ - "\u0000\u02a2\u02a3\u0005r\u0000\u0000\u02a3\u02a4\u0005s\u0000\u0000\u02a4"+ - "\u02a5\u0005t\u0000\u0000\u02a5l\u0001\u0000\u0000\u0000\u02a6\u02a7\u0005"+ - "l\u0000\u0000\u02a7\u02a8\u0005a\u0000\u0000\u02a8\u02a9\u0005s\u0000"+ - "\u0000\u02a9\u02aa\u0005t\u0000\u0000\u02aan\u0001\u0000\u0000\u0000\u02ab"+ - "\u02ac\u0005(\u0000\u0000\u02acp\u0001\u0000\u0000\u0000\u02ad\u02ae\u0005"+ - "i\u0000\u0000\u02ae\u02af\u0005n\u0000\u0000\u02afr\u0001\u0000\u0000"+ - "\u0000\u02b0\u02b1\u0005i\u0000\u0000\u02b1\u02b2\u0005s\u0000\u0000\u02b2"+ - "t\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005l\u0000\u0000\u02b4\u02b5\u0005"+ - "i\u0000\u0000\u02b5\u02b6\u0005k\u0000\u0000\u02b6\u02b7\u0005e\u0000"+ - "\u0000\u02b7v\u0001\u0000\u0000\u0000\u02b8\u02b9\u0005n\u0000\u0000\u02b9"+ - "\u02ba\u0005o\u0000\u0000\u02ba\u02bb\u0005t\u0000\u0000\u02bbx\u0001"+ - "\u0000\u0000\u0000\u02bc\u02bd\u0005n\u0000\u0000\u02bd\u02be\u0005u\u0000"+ - 
"\u0000\u02be\u02bf\u0005l\u0000\u0000\u02bf\u02c0\u0005l\u0000\u0000\u02c0"+ - "z\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005n\u0000\u0000\u02c2\u02c3\u0005"+ - "u\u0000\u0000\u02c3\u02c4\u0005l\u0000\u0000\u02c4\u02c5\u0005l\u0000"+ - "\u0000\u02c5\u02c6\u0005s\u0000\u0000\u02c6|\u0001\u0000\u0000\u0000\u02c7"+ - "\u02c8\u0005o\u0000\u0000\u02c8\u02c9\u0005r\u0000\u0000\u02c9~\u0001"+ - "\u0000\u0000\u0000\u02ca\u02cb\u0005?\u0000\u0000\u02cb\u0080\u0001\u0000"+ - "\u0000\u0000\u02cc\u02cd\u0005r\u0000\u0000\u02cd\u02ce\u0005l\u0000\u0000"+ - "\u02ce\u02cf\u0005i\u0000\u0000\u02cf\u02d0\u0005k\u0000\u0000\u02d0\u02d1"+ - "\u0005e\u0000\u0000\u02d1\u0082\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005"+ - ")\u0000\u0000\u02d3\u0084\u0001\u0000\u0000\u0000\u02d4\u02d5\u0005t\u0000"+ - "\u0000\u02d5\u02d6\u0005r\u0000\u0000\u02d6\u02d7\u0005u\u0000\u0000\u02d7"+ - "\u02d8\u0005e\u0000\u0000\u02d8\u0086\u0001\u0000\u0000\u0000\u02d9\u02da"+ - "\u0005=\u0000\u0000\u02da\u02db\u0005=\u0000\u0000\u02db\u0088\u0001\u0000"+ - "\u0000\u0000\u02dc\u02dd\u0005=\u0000\u0000\u02dd\u02de\u0005~\u0000\u0000"+ - "\u02de\u008a\u0001\u0000\u0000\u0000\u02df\u02e0\u0005!\u0000\u0000\u02e0"+ - "\u02e1\u0005=\u0000\u0000\u02e1\u008c\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0005<\u0000\u0000\u02e3\u008e\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005"+ - "<\u0000\u0000\u02e5\u02e6\u0005=\u0000\u0000\u02e6\u0090\u0001\u0000\u0000"+ - "\u0000\u02e7\u02e8\u0005>\u0000\u0000\u02e8\u0092\u0001\u0000\u0000\u0000"+ - "\u02e9\u02ea\u0005>\u0000\u0000\u02ea\u02eb\u0005=\u0000\u0000\u02eb\u0094"+ - "\u0001\u0000\u0000\u0000\u02ec\u02ed\u0005+\u0000\u0000\u02ed\u0096\u0001"+ - "\u0000\u0000\u0000\u02ee\u02ef\u0005-\u0000\u0000\u02ef\u0098\u0001\u0000"+ - "\u0000\u0000\u02f0\u02f1\u0005*\u0000\u0000\u02f1\u009a\u0001\u0000\u0000"+ - "\u0000\u02f2\u02f3\u0005/\u0000\u0000\u02f3\u009c\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f5\u0005%\u0000\u0000\u02f5\u009e\u0001\u0000\u0000\u0000\u02f6"+ - "\u02f7\u0005[\u0000\u0000\u02f7\u02f8\u0001\u0000\u0000\u0000\u02f8\u02f9"+ - "\u0006J\u0000\u0000\u02f9\u02fa\u0006J\u0000\u0000\u02fa\u00a0\u0001\u0000"+ - "\u0000\u0000\u02fb\u02fc\u0005]\u0000\u0000\u02fc\u02fd\u0001\u0000\u0000"+ - "\u0000\u02fd\u02fe\u0006K\f\u0000\u02fe\u02ff\u0006K\f\u0000\u02ff\u00a2"+ - "\u0001\u0000\u0000\u0000\u0300\u0304\u0003C\u001c\u0000\u0301\u0303\u0003"+ - "S$\u0000\u0302\u0301\u0001\u0000\u0000\u0000\u0303\u0306\u0001\u0000\u0000"+ - "\u0000\u0304\u0302\u0001\u0000\u0000\u0000\u0304\u0305\u0001\u0000\u0000"+ - "\u0000\u0305\u0311\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000"+ - "\u0000\u0307\u030a\u0003Q#\u0000\u0308\u030a\u0003K \u0000\u0309\u0307"+ - "\u0001\u0000\u0000\u0000\u0309\u0308\u0001\u0000\u0000\u0000\u030a\u030c"+ - "\u0001\u0000\u0000\u0000\u030b\u030d\u0003S$\u0000\u030c\u030b\u0001\u0000"+ - "\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u030c\u0001\u0000"+ - "\u0000\u0000\u030e\u030f\u0001\u0000\u0000\u0000\u030f\u0311\u0001\u0000"+ - "\u0000\u0000\u0310\u0300\u0001\u0000\u0000\u0000\u0310\u0309\u0001\u0000"+ - "\u0000\u0000\u0311\u00a4\u0001\u0000\u0000\u0000\u0312\u0314\u0003M!\u0000"+ - "\u0313\u0315\u0003O\"\u0000\u0314\u0313\u0001\u0000\u0000\u0000\u0315"+ - "\u0316\u0001\u0000\u0000\u0000\u0316\u0314\u0001\u0000\u0000\u0000\u0316"+ - "\u0317\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000\u0000\u0318"+ - "\u0319\u0003M!\u0000\u0319\u00a6\u0001\u0000\u0000\u0000\u031a\u031b\u0003"+ - "\u00a5M\u0000\u031b\u00a8\u0001\u0000\u0000\u0000\u031c\u031d\u0003/\u0012"+ - 
"\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e\u031f\u0006O\b\u0000"+ - "\u031f\u00aa\u0001\u0000\u0000\u0000\u0320\u0321\u00031\u0013\u0000\u0321"+ - "\u0322\u0001\u0000\u0000\u0000\u0322\u0323\u0006P\b\u0000\u0323\u00ac"+ - "\u0001\u0000\u0000\u0000\u0324\u0325\u00033\u0014\u0000\u0325\u0326\u0001"+ - "\u0000\u0000\u0000\u0326\u0327\u0006Q\b\u0000\u0327\u00ae\u0001\u0000"+ - "\u0000\u0000\u0328\u0329\u0003?\u001a\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032b\u0006R\u000b\u0000\u032b\u032c\u0006R\f\u0000\u032c"+ - "\u00b0\u0001\u0000\u0000\u0000\u032d\u032e\u0003\u009fJ\u0000\u032e\u032f"+ - "\u0001\u0000\u0000\u0000\u032f\u0330\u0006S\t\u0000\u0330\u00b2\u0001"+ - "\u0000\u0000\u0000\u0331\u0332\u0003\u00a1K\u0000\u0332\u0333\u0001\u0000"+ - "\u0000\u0000\u0333\u0334\u0006T\r\u0000\u0334\u00b4\u0001\u0000\u0000"+ - "\u0000\u0335\u0336\u0003c,\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ - "\u0338\u0006U\u000e\u0000\u0338\u00b6\u0001\u0000\u0000\u0000\u0339\u033a"+ - "\u0003a+\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006V"+ - "\u000f\u0000\u033c\u00b8\u0001\u0000\u0000\u0000\u033d\u033e\u0005m\u0000"+ - "\u0000\u033e\u033f\u0005e\u0000\u0000\u033f\u0340\u0005t\u0000\u0000\u0340"+ - "\u0341\u0005a\u0000\u0000\u0341\u0342\u0005d\u0000\u0000\u0342\u0343\u0005"+ - "a\u0000\u0000\u0343\u0344\u0005t\u0000\u0000\u0344\u0345\u0005a\u0000"+ - "\u0000\u0345\u00ba\u0001\u0000\u0000\u0000\u0346\u034a\b\n\u0000\u0000"+ - "\u0347\u0348\u0005/\u0000\u0000\u0348\u034a\b\u000b\u0000\u0000\u0349"+ - "\u0346\u0001\u0000\u0000\u0000\u0349\u0347\u0001\u0000\u0000\u0000\u034a"+ - "\u00bc\u0001\u0000\u0000\u0000\u034b\u034d\u0003\u00bbX\u0000\u034c\u034b"+ - "\u0001\u0000\u0000\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034c"+ - "\u0001\u0000\u0000\u0000\u034e\u034f\u0001\u0000\u0000\u0000\u034f\u00be"+ - "\u0001\u0000\u0000\u0000\u0350\u0351\u0003\u00a7N\u0000\u0351\u0352\u0001"+ - "\u0000\u0000\u0000\u0352\u0353\u0006Z\u0010\u0000\u0353\u00c0\u0001\u0000"+ - "\u0000\u0000\u0354\u0355\u0003/\u0012\u0000\u0355\u0356\u0001\u0000\u0000"+ - "\u0000\u0356\u0357\u0006[\b\u0000\u0357\u00c2\u0001\u0000\u0000\u0000"+ - "\u0358\u0359\u00031\u0013\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006\\\b\u0000\u035b\u00c4\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u00033\u0014\u0000\u035d\u035e\u0001\u0000\u0000\u0000\u035e\u035f\u0006"+ - "]\b\u0000\u035f\u00c6\u0001\u0000\u0000\u0000\u0360\u0361\u0003?\u001a"+ - "\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006^\u000b\u0000"+ - "\u0363\u0364\u0006^\f\u0000\u0364\u00c8\u0001\u0000\u0000\u0000\u0365"+ - "\u0366\u0003g.\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0368\u0006"+ - "_\u0011\u0000\u0368\u00ca\u0001\u0000\u0000\u0000\u0369\u036a\u0003c,"+ - "\u0000\u036a\u036b\u0001\u0000\u0000\u0000\u036b\u036c\u0006`\u000e\u0000"+ - "\u036c\u00cc\u0001\u0000\u0000\u0000\u036d\u0372\u0003C\u001c\u0000\u036e"+ - "\u0372\u0003A\u001b\u0000\u036f\u0372\u0003Q#\u0000\u0370\u0372\u0003"+ - "\u0099G\u0000\u0371\u036d\u0001\u0000\u0000\u0000\u0371\u036e\u0001\u0000"+ - "\u0000\u0000\u0371\u036f\u0001\u0000\u0000\u0000\u0371\u0370\u0001\u0000"+ - "\u0000\u0000\u0372\u00ce\u0001\u0000\u0000\u0000\u0373\u0376\u0003C\u001c"+ - "\u0000\u0374\u0376\u0003\u0099G\u0000\u0375\u0373\u0001\u0000\u0000\u0000"+ - "\u0375\u0374\u0001\u0000\u0000\u0000\u0376\u037a\u0001\u0000\u0000\u0000"+ - "\u0377\u0379\u0003\u00cda\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379"+ - 
"\u037c\u0001\u0000\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a"+ - "\u037b\u0001\u0000\u0000\u0000\u037b\u0387\u0001\u0000\u0000\u0000\u037c"+ - "\u037a\u0001\u0000\u0000\u0000\u037d\u0380\u0003Q#\u0000\u037e\u0380\u0003"+ - "K \u0000\u037f\u037d\u0001\u0000\u0000\u0000\u037f\u037e\u0001\u0000\u0000"+ - "\u0000\u0380\u0382\u0001\u0000\u0000\u0000\u0381\u0383\u0003\u00cda\u0000"+ - "\u0382\u0381\u0001\u0000\u0000\u0000\u0383\u0384\u0001\u0000\u0000\u0000"+ - "\u0384\u0382\u0001\u0000\u0000\u0000\u0384\u0385\u0001\u0000\u0000\u0000"+ - "\u0385\u0387\u0001\u0000\u0000\u0000\u0386\u0375\u0001\u0000\u0000\u0000"+ - "\u0386\u037f\u0001\u0000\u0000\u0000\u0387\u00d0\u0001\u0000\u0000\u0000"+ - "\u0388\u038b\u0003\u00cfb\u0000\u0389\u038b\u0003\u00a5M\u0000\u038a\u0388"+ - "\u0001\u0000\u0000\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038c"+ + "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0004\u0099\u048f\b\u0099\u000b\u0099\f\u0099\u0490\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0002"+ + "\u01fb\u0256\u0000\u009d\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014"+ + "\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&"+ + "\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u00156\u00168\u0000"+ + ":\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000"+ + "N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001f"+ + "b d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a"+ + "4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e"+ + ">\u00a0?\u00a2@\u00a4A\u00a6B\u00a8\u0000\u00aaC\u00acD\u00aeE\u00b0F"+ + "\u00b2\u0000\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000\u00bcG\u00be"+ + "\u0000\u00c0H\u00c2\u0000\u00c4I\u00c6J\u00c8K\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4L\u00d6M\u00d8N\u00daO\u00dc"+ + "\u0000\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4P\u00e6\u0000\u00e8Q\u00ea"+ + "R\u00ecS\u00ee\u0000\u00f0\u0000\u00f2T\u00f4U\u00f6\u0000\u00f8V\u00fa"+ + "\u0000\u00fc\u0000\u00feW\u0100X\u0102Y\u0104\u0000\u0106\u0000\u0108"+ + "\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112Z\u0114[\u0116"+ + "\\\u0118\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120]\u0122^\u0124"+ + "_\u0126\u0000\u0128`\u012aa\u012cb\u012ec\u0130\u0000\u0132d\u0134e\u0136"+ + "f\u0138g\u013a\u0000\u013ch\u013ei\u0140j\u0142k\u0144l\f\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t\n\r\r"+ + " //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]`"+ + "`||\u0002\u0000**//\u000b\u0000\t\n\r\r 
\"#,,//::<<>?\\\\||\u04b9\u0000"+ + "\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010"+ + "\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014"+ + "\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018"+ + "\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c"+ + "\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001"+ + "\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000"+ + "\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000"+ + "\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000."+ + "\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000"+ + "\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000"+ + "\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001<"+ + "\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001\u0000"+ + "\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000"+ + "\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002"+ + "^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001"+ + "\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000"+ + "\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002"+ + "l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001"+ + "\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000"+ + "\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002"+ + "z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001"+ + "\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082\u0001"+ + "\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086\u0001"+ + "\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a\u0001"+ + "\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e\u0001"+ + "\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092\u0001"+ + "\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096\u0001"+ + "\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a\u0001"+ + "\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e\u0001"+ + "\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2\u0001"+ + "\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ + "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8\u0001"+ + "\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0004\u00ca\u0001"+ + "\u0000\u0000\u0000\u0004\u00cc\u0001\u0000\u0000\u0000\u0004\u00ce\u0001"+ + "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ + "\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001"+ + "\u0000\u0000\u0000\u0005\u00dc\u0001\u0000\u0000\u0000\u0005\u00de\u0001"+ + "\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2\u0001"+ + "\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ + 
"\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0006\u00ee\u0001"+ + "\u0000\u0000\u0000\u0006\u00f0\u0001\u0000\u0000\u0000\u0006\u00f2\u0001"+ + "\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ + "\u0000\u0000\u0000\u0006\u00fa\u0001\u0000\u0000\u0000\u0006\u00fc\u0001"+ + "\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100\u0001"+ + "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ + "\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ + "\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ + "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000"+ + "\u0000\u0000\b\u011a\u0001\u0000\u0000\u0000\b\u011c\u0001\u0000\u0000"+ + "\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001\u0000\u0000\u0000"+ + "\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000\u0000\u0000\t\u0126"+ + "\u0001\u0000\u0000\u0000\t\u0128\u0001\u0000\u0000\u0000\t\u012a\u0001"+ + "\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000\t\u012e\u0001\u0000"+ + "\u0000\u0000\n\u0130\u0001\u0000\u0000\u0000\n\u0132\u0001\u0000\u0000"+ + "\u0000\n\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001\u0000\u0000\u0000"+ + "\n\u0138\u0001\u0000\u0000\u0000\u000b\u013a\u0001\u0000\u0000\u0000\u000b"+ + "\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001\u0000\u0000\u0000\u000b"+ + "\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000\u0000\u000b"+ + "\u0144\u0001\u0000\u0000\u0000\f\u0146\u0001\u0000\u0000\u0000\u000e\u0150"+ + "\u0001\u0000\u0000\u0000\u0010\u0157\u0001\u0000\u0000\u0000\u0012\u0160"+ + "\u0001\u0000\u0000\u0000\u0014\u0167\u0001\u0000\u0000\u0000\u0016\u0171"+ + "\u0001\u0000\u0000\u0000\u0018\u0178\u0001\u0000\u0000\u0000\u001a\u017f"+ + "\u0001\u0000\u0000\u0000\u001c\u018d\u0001\u0000\u0000\u0000\u001e\u0194"+ + "\u0001\u0000\u0000\u0000 \u019c\u0001\u0000\u0000\u0000\"\u01a3\u0001"+ + "\u0000\u0000\u0000$\u01af\u0001\u0000\u0000\u0000&\u01b8\u0001\u0000\u0000"+ + "\u0000(\u01be\u0001\u0000\u0000\u0000*\u01c5\u0001\u0000\u0000\u0000,"+ + "\u01cc\u0001\u0000\u0000\u0000.\u01d4\u0001\u0000\u0000\u00000\u01dd\u0001"+ + "\u0000\u0000\u00002\u01e3\u0001\u0000\u0000\u00004\u01f4\u0001\u0000\u0000"+ + "\u00006\u0204\u0001\u0000\u0000\u00008\u020a\u0001\u0000\u0000\u0000:"+ + "\u020f\u0001\u0000\u0000\u0000<\u0214\u0001\u0000\u0000\u0000>\u0218\u0001"+ + "\u0000\u0000\u0000@\u021c\u0001\u0000\u0000\u0000B\u0220\u0001\u0000\u0000"+ + "\u0000D\u0224\u0001\u0000\u0000\u0000F\u0226\u0001\u0000\u0000\u0000H"+ + "\u0228\u0001\u0000\u0000\u0000J\u022b\u0001\u0000\u0000\u0000L\u022d\u0001"+ + "\u0000\u0000\u0000N\u0236\u0001\u0000\u0000\u0000P\u0238\u0001\u0000\u0000"+ + "\u0000R\u023d\u0001\u0000\u0000\u0000T\u023f\u0001\u0000\u0000\u0000V"+ + "\u0244\u0001\u0000\u0000\u0000X\u0263\u0001\u0000\u0000\u0000Z\u0266\u0001"+ + "\u0000\u0000\u0000\\\u0294\u0001\u0000\u0000\u0000^\u0296\u0001\u0000"+ + "\u0000\u0000`\u0299\u0001\u0000\u0000\u0000b\u029d\u0001\u0000\u0000\u0000"+ + "d\u02a1\u0001\u0000\u0000\u0000f\u02a3\u0001\u0000\u0000\u0000h\u02a5"+ + "\u0001\u0000\u0000\u0000j\u02aa\u0001\u0000\u0000\u0000l\u02ac\u0001\u0000"+ + "\u0000\u0000n\u02b2\u0001\u0000\u0000\u0000p\u02b8\u0001\u0000\u0000\u0000"+ + 
"r\u02bd\u0001\u0000\u0000\u0000t\u02bf\u0001\u0000\u0000\u0000v\u02c2"+ + "\u0001\u0000\u0000\u0000x\u02c5\u0001\u0000\u0000\u0000z\u02ca\u0001\u0000"+ + "\u0000\u0000|\u02ce\u0001\u0000\u0000\u0000~\u02d3\u0001\u0000\u0000\u0000"+ + "\u0080\u02d9\u0001\u0000\u0000\u0000\u0082\u02dc\u0001\u0000\u0000\u0000"+ + "\u0084\u02de\u0001\u0000\u0000\u0000\u0086\u02e4\u0001\u0000\u0000\u0000"+ + "\u0088\u02e6\u0001\u0000\u0000\u0000\u008a\u02eb\u0001\u0000\u0000\u0000"+ + "\u008c\u02ee\u0001\u0000\u0000\u0000\u008e\u02f1\u0001\u0000\u0000\u0000"+ + "\u0090\u02f4\u0001\u0000\u0000\u0000\u0092\u02f6\u0001\u0000\u0000\u0000"+ + "\u0094\u02f9\u0001\u0000\u0000\u0000\u0096\u02fb\u0001\u0000\u0000\u0000"+ + "\u0098\u02fe\u0001\u0000\u0000\u0000\u009a\u0300\u0001\u0000\u0000\u0000"+ + "\u009c\u0302\u0001\u0000\u0000\u0000\u009e\u0304\u0001\u0000\u0000\u0000"+ + "\u00a0\u0306\u0001\u0000\u0000\u0000\u00a2\u0308\u0001\u0000\u0000\u0000"+ + "\u00a4\u030d\u0001\u0000\u0000\u0000\u00a6\u0322\u0001\u0000\u0000\u0000"+ + "\u00a8\u0324\u0001\u0000\u0000\u0000\u00aa\u032c\u0001\u0000\u0000\u0000"+ + "\u00ac\u032e\u0001\u0000\u0000\u0000\u00ae\u0332\u0001\u0000\u0000\u0000"+ + "\u00b0\u0336\u0001\u0000\u0000\u0000\u00b2\u033a\u0001\u0000\u0000\u0000"+ + "\u00b4\u033f\u0001\u0000\u0000\u0000\u00b6\u0343\u0001\u0000\u0000\u0000"+ + "\u00b8\u0347\u0001\u0000\u0000\u0000\u00ba\u034b\u0001\u0000\u0000\u0000"+ + "\u00bc\u034f\u0001\u0000\u0000\u0000\u00be\u035b\u0001\u0000\u0000\u0000"+ + "\u00c0\u035e\u0001\u0000\u0000\u0000\u00c2\u0362\u0001\u0000\u0000\u0000"+ + "\u00c4\u0366\u0001\u0000\u0000\u0000\u00c6\u036a\u0001\u0000\u0000\u0000"+ + "\u00c8\u036e\u0001\u0000\u0000\u0000\u00ca\u0372\u0001\u0000\u0000\u0000"+ + "\u00cc\u0377\u0001\u0000\u0000\u0000\u00ce\u037b\u0001\u0000\u0000\u0000"+ + "\u00d0\u0383\u0001\u0000\u0000\u0000\u00d2\u0398\u0001\u0000\u0000\u0000"+ + "\u00d4\u039c\u0001\u0000\u0000\u0000\u00d6\u03a0\u0001\u0000\u0000\u0000"+ + "\u00d8\u03a4\u0001\u0000\u0000\u0000\u00da\u03a8\u0001\u0000\u0000\u0000"+ + "\u00dc\u03ac\u0001\u0000\u0000\u0000\u00de\u03b1\u0001\u0000\u0000\u0000"+ + "\u00e0\u03b5\u0001\u0000\u0000\u0000\u00e2\u03b9\u0001\u0000\u0000\u0000"+ + "\u00e4\u03bd\u0001\u0000\u0000\u0000\u00e6\u03c0\u0001\u0000\u0000\u0000"+ + "\u00e8\u03c4\u0001\u0000\u0000\u0000\u00ea\u03c8\u0001\u0000\u0000\u0000"+ + "\u00ec\u03cc\u0001\u0000\u0000\u0000\u00ee\u03d0\u0001\u0000\u0000\u0000"+ + "\u00f0\u03d5\u0001\u0000\u0000\u0000\u00f2\u03da\u0001\u0000\u0000\u0000"+ + "\u00f4\u03df\u0001\u0000\u0000\u0000\u00f6\u03e6\u0001\u0000\u0000\u0000"+ + "\u00f8\u03ef\u0001\u0000\u0000\u0000\u00fa\u03f6\u0001\u0000\u0000\u0000"+ + "\u00fc\u03fa\u0001\u0000\u0000\u0000\u00fe\u03fe\u0001\u0000\u0000\u0000"+ + "\u0100\u0402\u0001\u0000\u0000\u0000\u0102\u0406\u0001\u0000\u0000\u0000"+ + "\u0104\u040a\u0001\u0000\u0000\u0000\u0106\u0410\u0001\u0000\u0000\u0000"+ + "\u0108\u0414\u0001\u0000\u0000\u0000\u010a\u0418\u0001\u0000\u0000\u0000"+ + "\u010c\u041c\u0001\u0000\u0000\u0000\u010e\u0420\u0001\u0000\u0000\u0000"+ + "\u0110\u0424\u0001\u0000\u0000\u0000\u0112\u0428\u0001\u0000\u0000\u0000"+ + "\u0114\u042c\u0001\u0000\u0000\u0000\u0116\u0430\u0001\u0000\u0000\u0000"+ + "\u0118\u0434\u0001\u0000\u0000\u0000\u011a\u0439\u0001\u0000\u0000\u0000"+ + "\u011c\u043d\u0001\u0000\u0000\u0000\u011e\u0441\u0001\u0000\u0000\u0000"+ + "\u0120\u0445\u0001\u0000\u0000\u0000\u0122\u0449\u0001\u0000\u0000\u0000"+ + "\u0124\u044d\u0001\u0000\u0000\u0000\u0126\u0451\u0001\u0000\u0000\u0000"+ + 
"\u0128\u0456\u0001\u0000\u0000\u0000\u012a\u045b\u0001\u0000\u0000\u0000"+ + "\u012c\u045f\u0001\u0000\u0000\u0000\u012e\u0463\u0001\u0000\u0000\u0000"+ + "\u0130\u0467\u0001\u0000\u0000\u0000\u0132\u046c\u0001\u0000\u0000\u0000"+ + "\u0134\u0476\u0001\u0000\u0000\u0000\u0136\u047a\u0001\u0000\u0000\u0000"+ + "\u0138\u047e\u0001\u0000\u0000\u0000\u013a\u0482\u0001\u0000\u0000\u0000"+ + "\u013c\u0487\u0001\u0000\u0000\u0000\u013e\u048e\u0001\u0000\u0000\u0000"+ + "\u0140\u0492\u0001\u0000\u0000\u0000\u0142\u0496\u0001\u0000\u0000\u0000"+ + "\u0144\u049a\u0001\u0000\u0000\u0000\u0146\u0147\u0005d\u0000\u0000\u0147"+ + "\u0148\u0005i\u0000\u0000\u0148\u0149\u0005s\u0000\u0000\u0149\u014a\u0005"+ + "s\u0000\u0000\u014a\u014b\u0005e\u0000\u0000\u014b\u014c\u0005c\u0000"+ + "\u0000\u014c\u014d\u0005t\u0000\u0000\u014d\u014e\u0001\u0000\u0000\u0000"+ + "\u014e\u014f\u0006\u0000\u0000\u0000\u014f\r\u0001\u0000\u0000\u0000\u0150"+ + "\u0151\u0005d\u0000\u0000\u0151\u0152\u0005r\u0000\u0000\u0152\u0153\u0005"+ + "o\u0000\u0000\u0153\u0154\u0005p\u0000\u0000\u0154\u0155\u0001\u0000\u0000"+ + "\u0000\u0155\u0156\u0006\u0001\u0001\u0000\u0156\u000f\u0001\u0000\u0000"+ + "\u0000\u0157\u0158\u0005e\u0000\u0000\u0158\u0159\u0005n\u0000\u0000\u0159"+ + "\u015a\u0005r\u0000\u0000\u015a\u015b\u0005i\u0000\u0000\u015b\u015c\u0005"+ + "c\u0000\u0000\u015c\u015d\u0005h\u0000\u0000\u015d\u015e\u0001\u0000\u0000"+ + "\u0000\u015e\u015f\u0006\u0002\u0002\u0000\u015f\u0011\u0001\u0000\u0000"+ + "\u0000\u0160\u0161\u0005e\u0000\u0000\u0161\u0162\u0005v\u0000\u0000\u0162"+ + "\u0163\u0005a\u0000\u0000\u0163\u0164\u0005l\u0000\u0000\u0164\u0165\u0001"+ + "\u0000\u0000\u0000\u0165\u0166\u0006\u0003\u0000\u0000\u0166\u0013\u0001"+ + "\u0000\u0000\u0000\u0167\u0168\u0005e\u0000\u0000\u0168\u0169\u0005x\u0000"+ + "\u0000\u0169\u016a\u0005p\u0000\u0000\u016a\u016b\u0005l\u0000\u0000\u016b"+ + "\u016c\u0005a\u0000\u0000\u016c\u016d\u0005i\u0000\u0000\u016d\u016e\u0005"+ + "n\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0170\u0006\u0004"+ + "\u0003\u0000\u0170\u0015\u0001\u0000\u0000\u0000\u0171\u0172\u0005f\u0000"+ + "\u0000\u0172\u0173\u0005r\u0000\u0000\u0173\u0174\u0005o\u0000\u0000\u0174"+ + "\u0175\u0005m\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u0176\u0177"+ + "\u0006\u0005\u0004\u0000\u0177\u0017\u0001\u0000\u0000\u0000\u0178\u0179"+ + "\u0005g\u0000\u0000\u0179\u017a\u0005r\u0000\u0000\u017a\u017b\u0005o"+ + "\u0000\u0000\u017b\u017c\u0005k\u0000\u0000\u017c\u017d\u0001\u0000\u0000"+ + "\u0000\u017d\u017e\u0006\u0006\u0000\u0000\u017e\u0019\u0001\u0000\u0000"+ + "\u0000\u017f\u0180\u0005i\u0000\u0000\u0180\u0181\u0005n\u0000\u0000\u0181"+ + "\u0182\u0005l\u0000\u0000\u0182\u0183\u0005i\u0000\u0000\u0183\u0184\u0005"+ + "n\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005s\u0000"+ + "\u0000\u0186\u0187\u0005t\u0000\u0000\u0187\u0188\u0005a\u0000\u0000\u0188"+ + "\u0189\u0005t\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0001"+ + "\u0000\u0000\u0000\u018b\u018c\u0006\u0007\u0000\u0000\u018c\u001b\u0001"+ + "\u0000\u0000\u0000\u018d\u018e\u0005k\u0000\u0000\u018e\u018f\u0005e\u0000"+ + "\u0000\u018f\u0190\u0005e\u0000\u0000\u0190\u0191\u0005p\u0000\u0000\u0191"+ + "\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\b\u0001\u0000\u0193\u001d"+ + "\u0001\u0000\u0000\u0000\u0194\u0195\u0005l\u0000\u0000\u0195\u0196\u0005"+ + "i\u0000\u0000\u0196\u0197\u0005m\u0000\u0000\u0197\u0198\u0005i\u0000"+ + "\u0000\u0198\u0199\u0005t\u0000\u0000\u0199\u019a\u0001\u0000\u0000\u0000"+ + 
"\u019a\u019b\u0006\t\u0000\u0000\u019b\u001f\u0001\u0000\u0000\u0000\u019c"+ + "\u019d\u0005m\u0000\u0000\u019d\u019e\u0005e\u0000\u0000\u019e\u019f\u0005"+ + "t\u0000\u0000\u019f\u01a0\u0005a\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a1\u01a2\u0006\n\u0005\u0000\u01a2!\u0001\u0000\u0000\u0000"+ + "\u01a3\u01a4\u0005m\u0000\u0000\u01a4\u01a5\u0005v\u0000\u0000\u01a5\u01a6"+ + "\u0005_\u0000\u0000\u01a6\u01a7\u0005e\u0000\u0000\u01a7\u01a8\u0005x"+ + "\u0000\u0000\u01a8\u01a9\u0005p\u0000\u0000\u01a9\u01aa\u0005a\u0000\u0000"+ + "\u01aa\u01ab\u0005n\u0000\u0000\u01ab\u01ac\u0005d\u0000\u0000\u01ac\u01ad"+ + "\u0001\u0000\u0000\u0000\u01ad\u01ae\u0006\u000b\u0006\u0000\u01ae#\u0001"+ + "\u0000\u0000\u0000\u01af\u01b0\u0005r\u0000\u0000\u01b0\u01b1\u0005e\u0000"+ + "\u0000\u01b1\u01b2\u0005n\u0000\u0000\u01b2\u01b3\u0005a\u0000\u0000\u01b3"+ + "\u01b4\u0005m\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6\u0001"+ + "\u0000\u0000\u0000\u01b6\u01b7\u0006\f\u0007\u0000\u01b7%\u0001\u0000"+ + "\u0000\u0000\u01b8\u01b9\u0005r\u0000\u0000\u01b9\u01ba\u0005o\u0000\u0000"+ + "\u01ba\u01bb\u0005w\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc"+ + "\u01bd\u0006\r\u0000\u0000\u01bd\'\u0001\u0000\u0000\u0000\u01be\u01bf"+ + "\u0005s\u0000\u0000\u01bf\u01c0\u0005h\u0000\u0000\u01c0\u01c1\u0005o"+ + "\u0000\u0000\u01c1\u01c2\u0005w\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000"+ + "\u0000\u01c3\u01c4\u0006\u000e\b\u0000\u01c4)\u0001\u0000\u0000\u0000"+ + "\u01c5\u01c6\u0005s\u0000\u0000\u01c6\u01c7\u0005o\u0000\u0000\u01c7\u01c8"+ + "\u0005r\u0000\u0000\u01c8\u01c9\u0005t\u0000\u0000\u01c9\u01ca\u0001\u0000"+ + "\u0000\u0000\u01ca\u01cb\u0006\u000f\u0000\u0000\u01cb+\u0001\u0000\u0000"+ + "\u0000\u01cc\u01cd\u0005s\u0000\u0000\u01cd\u01ce\u0005t\u0000\u0000\u01ce"+ + "\u01cf\u0005a\u0000\u0000\u01cf\u01d0\u0005t\u0000\u0000\u01d0\u01d1\u0005"+ + "s\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0010"+ + "\u0000\u0000\u01d3-\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005w\u0000\u0000"+ + "\u01d5\u01d6\u0005h\u0000\u0000\u01d6\u01d7\u0005e\u0000\u0000\u01d7\u01d8"+ + "\u0005r\u0000\u0000\u01d8\u01d9\u0005e\u0000\u0000\u01d9\u01da\u0001\u0000"+ + "\u0000\u0000\u01da\u01db\u0006\u0011\u0000\u0000\u01db/\u0001\u0000\u0000"+ + "\u0000\u01dc\u01de\b\u0000\u0000\u0000\u01dd\u01dc\u0001\u0000\u0000\u0000"+ + "\u01de\u01df\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000"+ + "\u01df\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000"+ + "\u01e1\u01e2\u0006\u0012\u0000\u0000\u01e21\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0005/\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000\u01e5\u01e9\u0001"+ + "\u0000\u0000\u0000\u01e6\u01e8\b\u0001\u0000\u0000\u01e7\u01e6\u0001\u0000"+ + "\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ed\u0001\u0000"+ + "\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ee\u0005\r\u0000"+ + "\u0000\u01ed\u01ec\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000"+ + "\u0000\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01f1\u0005\n\u0000\u0000"+ + "\u01f0\u01ef\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3\u0006\u0013\t\u0000\u01f3"+ + "3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005/\u0000\u0000\u01f5\u01f6\u0005"+ + "*\u0000\u0000\u01f6\u01fb\u0001\u0000\u0000\u0000\u01f7\u01fa\u00034\u0014"+ + "\u0000\u01f8\u01fa\t\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000"+ + 
"\u01f9\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000"+ + "\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000"+ + "\u01fc\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fe\u01ff\u0005*\u0000\u0000\u01ff\u0200\u0005/\u0000\u0000\u0200\u0201"+ + "\u0001\u0000\u0000\u0000\u0201\u0202\u0006\u0014\t\u0000\u02025\u0001"+ + "\u0000\u0000\u0000\u0203\u0205\u0007\u0002\u0000\u0000\u0204\u0203\u0001"+ + "\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0204\u0001"+ + "\u0000\u0000\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0006\u0015\t\u0000\u02097\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0003\u00a2K\u0000\u020b\u020c\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u0006\u0016\n\u0000\u020d\u020e\u0006\u0016\u000b\u0000"+ + "\u020e9\u0001\u0000\u0000\u0000\u020f\u0210\u0003B\u001b\u0000\u0210\u0211"+ + "\u0001\u0000\u0000\u0000\u0211\u0212\u0006\u0017\f\u0000\u0212\u0213\u0006"+ + "\u0017\r\u0000\u0213;\u0001\u0000\u0000\u0000\u0214\u0215\u00036\u0015"+ + "\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216\u0217\u0006\u0018\t\u0000"+ + "\u0217=\u0001\u0000\u0000\u0000\u0218\u0219\u00032\u0013\u0000\u0219\u021a"+ + "\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0019\t\u0000\u021b?\u0001"+ + "\u0000\u0000\u0000\u021c\u021d\u00034\u0014\u0000\u021d\u021e\u0001\u0000"+ + "\u0000\u0000\u021e\u021f\u0006\u001a\t\u0000\u021fA\u0001\u0000\u0000"+ + "\u0000\u0220\u0221\u0005|\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000"+ + "\u0222\u0223\u0006\u001b\r\u0000\u0223C\u0001\u0000\u0000\u0000\u0224"+ + "\u0225\u0007\u0003\u0000\u0000\u0225E\u0001\u0000\u0000\u0000\u0226\u0227"+ + "\u0007\u0004\u0000\u0000\u0227G\u0001\u0000\u0000\u0000\u0228\u0229\u0005"+ + "\\\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000\u022aI\u0001\u0000"+ + "\u0000\u0000\u022b\u022c\b\u0006\u0000\u0000\u022cK\u0001\u0000\u0000"+ + "\u0000\u022d\u022f\u0007\u0007\u0000\u0000\u022e\u0230\u0007\b\u0000\u0000"+ + "\u022f\u022e\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000\u0000\u0000"+ + "\u0230\u0232\u0001\u0000\u0000\u0000\u0231\u0233\u0003D\u001c\u0000\u0232"+ + "\u0231\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000\u0234"+ + "\u0232\u0001\u0000\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235"+ + "M\u0001\u0000\u0000\u0000\u0236\u0237\u0005@\u0000\u0000\u0237O\u0001"+ + "\u0000\u0000\u0000\u0238\u0239\u0005`\u0000\u0000\u0239Q\u0001\u0000\u0000"+ + "\u0000\u023a\u023e\b\t\u0000\u0000\u023b\u023c\u0005`\u0000\u0000\u023c"+ + "\u023e\u0005`\u0000\u0000\u023d\u023a\u0001\u0000\u0000\u0000\u023d\u023b"+ + "\u0001\u0000\u0000\u0000\u023eS\u0001\u0000\u0000\u0000\u023f\u0240\u0005"+ + "_\u0000\u0000\u0240U\u0001\u0000\u0000\u0000\u0241\u0245\u0003F\u001d"+ + "\u0000\u0242\u0245\u0003D\u001c\u0000\u0243\u0245\u0003T$\u0000\u0244"+ + "\u0241\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244"+ + "\u0243\u0001\u0000\u0000\u0000\u0245W\u0001\u0000\u0000\u0000\u0246\u024b"+ + "\u0005\"\u0000\u0000\u0247\u024a\u0003H\u001e\u0000\u0248\u024a\u0003"+ + "J\u001f\u0000\u0249\u0247\u0001\u0000\u0000\u0000\u0249\u0248\u0001\u0000"+ + "\u0000\u0000\u024a\u024d\u0001\u0000\u0000\u0000\u024b\u0249\u0001\u0000"+ + "\u0000\u0000\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024e\u0001\u0000"+ + "\u0000\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024e\u0264\u0005\"\u0000"+ + "\u0000\u024f\u0250\u0005\"\u0000\u0000\u0250\u0251\u0005\"\u0000\u0000"+ + 
"\u0251\u0252\u0005\"\u0000\u0000\u0252\u0256\u0001\u0000\u0000\u0000\u0253"+ + "\u0255\b\u0001\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0255\u0258"+ + "\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0256\u0254"+ + "\u0001\u0000\u0000\u0000\u0257\u0259\u0001\u0000\u0000\u0000\u0258\u0256"+ + "\u0001\u0000\u0000\u0000\u0259\u025a\u0005\"\u0000\u0000\u025a\u025b\u0005"+ + "\"\u0000\u0000\u025b\u025c\u0005\"\u0000\u0000\u025c\u025e\u0001\u0000"+ + "\u0000\u0000\u025d\u025f\u0005\"\u0000\u0000\u025e\u025d\u0001\u0000\u0000"+ + "\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0261\u0001\u0000\u0000"+ + "\u0000\u0260\u0262\u0005\"\u0000\u0000\u0261\u0260\u0001\u0000\u0000\u0000"+ + "\u0261\u0262\u0001\u0000\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ + "\u0263\u0246\u0001\u0000\u0000\u0000\u0263\u024f\u0001\u0000\u0000\u0000"+ + "\u0264Y\u0001\u0000\u0000\u0000\u0265\u0267\u0003D\u001c\u0000\u0266\u0265"+ + "\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0266"+ + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269[\u0001"+ + "\u0000\u0000\u0000\u026a\u026c\u0003D\u001c\u0000\u026b\u026a\u0001\u0000"+ + "\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026b\u0001\u0000"+ + "\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000"+ + "\u0000\u0000\u026f\u0273\u0003j/\u0000\u0270\u0272\u0003D\u001c\u0000"+ + "\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001\u0000\u0000\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ + "\u0274\u0295\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000"+ + "\u0276\u0278\u0003j/\u0000\u0277\u0279\u0003D\u001c\u0000\u0278\u0277"+ + "\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u0278"+ + "\u0001\u0000\u0000\u0000\u027a\u027b\u0001\u0000\u0000\u0000\u027b\u0295"+ + "\u0001\u0000\u0000\u0000\u027c\u027e\u0003D\u001c\u0000\u027d\u027c\u0001"+ + "\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u027d\u0001"+ + "\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0288\u0001"+ + "\u0000\u0000\u0000\u0281\u0285\u0003j/\u0000\u0282\u0284\u0003D\u001c"+ + "\u0000\u0283\u0282\u0001\u0000\u0000\u0000\u0284\u0287\u0001\u0000\u0000"+ + "\u0000\u0285\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000"+ + "\u0000\u0286\u0289\u0001\u0000\u0000\u0000\u0287\u0285\u0001\u0000\u0000"+ + "\u0000\u0288\u0281\u0001\u0000\u0000\u0000\u0288\u0289\u0001\u0000\u0000"+ + "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0003L \u0000\u028b"+ + "\u0295\u0001\u0000\u0000\u0000\u028c\u028e\u0003j/\u0000\u028d\u028f\u0003"+ + "D\u001c\u0000\u028e\u028d\u0001\u0000\u0000\u0000\u028f\u0290\u0001\u0000"+ + "\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000"+ + "\u0000\u0000\u0291\u0292\u0001\u0000\u0000\u0000\u0292\u0293\u0003L \u0000"+ + "\u0293\u0295\u0001\u0000\u0000\u0000\u0294\u026b\u0001\u0000\u0000\u0000"+ + "\u0294\u0276\u0001\u0000\u0000\u0000\u0294\u027d\u0001\u0000\u0000\u0000"+ + "\u0294\u028c\u0001\u0000\u0000\u0000\u0295]\u0001\u0000\u0000\u0000\u0296"+ + "\u0297\u0005b\u0000\u0000\u0297\u0298\u0005y\u0000\u0000\u0298_\u0001"+ + "\u0000\u0000\u0000\u0299\u029a\u0005a\u0000\u0000\u029a\u029b\u0005n\u0000"+ + "\u0000\u029b\u029c\u0005d\u0000\u0000\u029ca\u0001\u0000\u0000\u0000\u029d"+ + "\u029e\u0005a\u0000\u0000\u029e\u029f\u0005s\u0000\u0000\u029f\u02a0\u0005"+ + "c\u0000\u0000\u02a0c\u0001\u0000\u0000\u0000\u02a1\u02a2\u0005=\u0000"+ + 
"\u0000\u02a2e\u0001\u0000\u0000\u0000\u02a3\u02a4\u0005,\u0000\u0000\u02a4"+ + "g\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005d\u0000\u0000\u02a6\u02a7\u0005"+ + "e\u0000\u0000\u02a7\u02a8\u0005s\u0000\u0000\u02a8\u02a9\u0005c\u0000"+ + "\u0000\u02a9i\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005.\u0000\u0000\u02ab"+ + "k\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005f\u0000\u0000\u02ad\u02ae\u0005"+ + "a\u0000\u0000\u02ae\u02af\u0005l\u0000\u0000\u02af\u02b0\u0005s\u0000"+ + "\u0000\u02b0\u02b1\u0005e\u0000\u0000\u02b1m\u0001\u0000\u0000\u0000\u02b2"+ + "\u02b3\u0005f\u0000\u0000\u02b3\u02b4\u0005i\u0000\u0000\u02b4\u02b5\u0005"+ + "r\u0000\u0000\u02b5\u02b6\u0005s\u0000\u0000\u02b6\u02b7\u0005t\u0000"+ + "\u0000\u02b7o\u0001\u0000\u0000\u0000\u02b8\u02b9\u0005l\u0000\u0000\u02b9"+ + "\u02ba\u0005a\u0000\u0000\u02ba\u02bb\u0005s\u0000\u0000\u02bb\u02bc\u0005"+ + "t\u0000\u0000\u02bcq\u0001\u0000\u0000\u0000\u02bd\u02be\u0005(\u0000"+ + "\u0000\u02bes\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005i\u0000\u0000\u02c0"+ + "\u02c1\u0005n\u0000\u0000\u02c1u\u0001\u0000\u0000\u0000\u02c2\u02c3\u0005"+ + "i\u0000\u0000\u02c3\u02c4\u0005s\u0000\u0000\u02c4w\u0001\u0000\u0000"+ + "\u0000\u02c5\u02c6\u0005l\u0000\u0000\u02c6\u02c7\u0005i\u0000\u0000\u02c7"+ + "\u02c8\u0005k\u0000\u0000\u02c8\u02c9\u0005e\u0000\u0000\u02c9y\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cb\u0005n\u0000\u0000\u02cb\u02cc\u0005o\u0000"+ + "\u0000\u02cc\u02cd\u0005t\u0000\u0000\u02cd{\u0001\u0000\u0000\u0000\u02ce"+ + "\u02cf\u0005n\u0000\u0000\u02cf\u02d0\u0005u\u0000\u0000\u02d0\u02d1\u0005"+ + "l\u0000\u0000\u02d1\u02d2\u0005l\u0000\u0000\u02d2}\u0001\u0000\u0000"+ + "\u0000\u02d3\u02d4\u0005n\u0000\u0000\u02d4\u02d5\u0005u\u0000\u0000\u02d5"+ + "\u02d6\u0005l\u0000\u0000\u02d6\u02d7\u0005l\u0000\u0000\u02d7\u02d8\u0005"+ + "s\u0000\u0000\u02d8\u007f\u0001\u0000\u0000\u0000\u02d9\u02da\u0005o\u0000"+ + "\u0000\u02da\u02db\u0005r\u0000\u0000\u02db\u0081\u0001\u0000\u0000\u0000"+ + "\u02dc\u02dd\u0005?\u0000\u0000\u02dd\u0083\u0001\u0000\u0000\u0000\u02de"+ + "\u02df\u0005r\u0000\u0000\u02df\u02e0\u0005l\u0000\u0000\u02e0\u02e1\u0005"+ + "i\u0000\u0000\u02e1\u02e2\u0005k\u0000\u0000\u02e2\u02e3\u0005e\u0000"+ + "\u0000\u02e3\u0085\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005)\u0000\u0000"+ + "\u02e5\u0087\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005t\u0000\u0000\u02e7"+ + "\u02e8\u0005r\u0000\u0000\u02e8\u02e9\u0005u\u0000\u0000\u02e9\u02ea\u0005"+ + "e\u0000\u0000\u02ea\u0089\u0001\u0000\u0000\u0000\u02eb\u02ec\u0005=\u0000"+ + "\u0000\u02ec\u02ed\u0005=\u0000\u0000\u02ed\u008b\u0001\u0000\u0000\u0000"+ + "\u02ee\u02ef\u0005=\u0000\u0000\u02ef\u02f0\u0005~\u0000\u0000\u02f0\u008d"+ + "\u0001\u0000\u0000\u0000\u02f1\u02f2\u0005!\u0000\u0000\u02f2\u02f3\u0005"+ + "=\u0000\u0000\u02f3\u008f\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005<\u0000"+ + "\u0000\u02f5\u0091\u0001\u0000\u0000\u0000\u02f6\u02f7\u0005<\u0000\u0000"+ + "\u02f7\u02f8\u0005=\u0000\u0000\u02f8\u0093\u0001\u0000\u0000\u0000\u02f9"+ + "\u02fa\u0005>\u0000\u0000\u02fa\u0095\u0001\u0000\u0000\u0000\u02fb\u02fc"+ + "\u0005>\u0000\u0000\u02fc\u02fd\u0005=\u0000\u0000\u02fd\u0097\u0001\u0000"+ + "\u0000\u0000\u02fe\u02ff\u0005+\u0000\u0000\u02ff\u0099\u0001\u0000\u0000"+ + "\u0000\u0300\u0301\u0005-\u0000\u0000\u0301\u009b\u0001\u0000\u0000\u0000"+ + "\u0302\u0303\u0005*\u0000\u0000\u0303\u009d\u0001\u0000\u0000\u0000\u0304"+ + "\u0305\u0005/\u0000\u0000\u0305\u009f\u0001\u0000\u0000\u0000\u0306\u0307"+ + "\u0005%\u0000\u0000\u0307\u00a1\u0001\u0000\u0000\u0000\u0308\u0309\u0005"+ + 
"[\u0000\u0000\u0309\u030a\u0001\u0000\u0000\u0000\u030a\u030b\u0006K\u0000"+ + "\u0000\u030b\u030c\u0006K\u0000\u0000\u030c\u00a3\u0001\u0000\u0000\u0000"+ + "\u030d\u030e\u0005]\u0000\u0000\u030e\u030f\u0001\u0000\u0000\u0000\u030f"+ + "\u0310\u0006L\r\u0000\u0310\u0311\u0006L\r\u0000\u0311\u00a5\u0001\u0000"+ + "\u0000\u0000\u0312\u0316\u0003F\u001d\u0000\u0313\u0315\u0003V%\u0000"+ + "\u0314\u0313\u0001\u0000\u0000\u0000\u0315\u0318\u0001\u0000\u0000\u0000"+ + "\u0316\u0314\u0001\u0000\u0000\u0000\u0316\u0317\u0001\u0000\u0000\u0000"+ + "\u0317\u0323\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000"+ + "\u0319\u031c\u0003T$\u0000\u031a\u031c\u0003N!\u0000\u031b\u0319\u0001"+ + "\u0000\u0000\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031e\u0001"+ + "\u0000\u0000\u0000\u031d\u031f\u0003V%\u0000\u031e\u031d\u0001\u0000\u0000"+ + "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u031e\u0001\u0000\u0000"+ + "\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0323\u0001\u0000\u0000"+ + "\u0000\u0322\u0312\u0001\u0000\u0000\u0000\u0322\u031b\u0001\u0000\u0000"+ + "\u0000\u0323\u00a7\u0001\u0000\u0000\u0000\u0324\u0326\u0003P\"\u0000"+ + "\u0325\u0327\u0003R#\u0000\u0326\u0325\u0001\u0000\u0000\u0000\u0327\u0328"+ + "\u0001\u0000\u0000\u0000\u0328\u0326\u0001\u0000\u0000\u0000\u0328\u0329"+ + "\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000\u0000\u032a\u032b"+ + "\u0003P\"\u0000\u032b\u00a9\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "\u00a8N\u0000\u032d\u00ab\u0001\u0000\u0000\u0000\u032e\u032f\u00032\u0013"+ + "\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0006P\t\u0000"+ + "\u0331\u00ad\u0001\u0000\u0000\u0000\u0332\u0333\u00034\u0014\u0000\u0333"+ + "\u0334\u0001\u0000\u0000\u0000\u0334\u0335\u0006Q\t\u0000\u0335\u00af"+ + "\u0001\u0000\u0000\u0000\u0336\u0337\u00036\u0015\u0000\u0337\u0338\u0001"+ + "\u0000\u0000\u0000\u0338\u0339\u0006R\t\u0000\u0339\u00b1\u0001\u0000"+ + "\u0000\u0000\u033a\u033b\u0003B\u001b\u0000\u033b\u033c\u0001\u0000\u0000"+ + "\u0000\u033c\u033d\u0006S\f\u0000\u033d\u033e\u0006S\r\u0000\u033e\u00b3"+ + "\u0001\u0000\u0000\u0000\u033f\u0340\u0003\u00a2K\u0000\u0340\u0341\u0001"+ + "\u0000\u0000\u0000\u0341\u0342\u0006T\n\u0000\u0342\u00b5\u0001\u0000"+ + "\u0000\u0000\u0343\u0344\u0003\u00a4L\u0000\u0344\u0345\u0001\u0000\u0000"+ + "\u0000\u0345\u0346\u0006U\u000e\u0000\u0346\u00b7\u0001\u0000\u0000\u0000"+ + "\u0347\u0348\u0003f-\u0000\u0348\u0349\u0001\u0000\u0000\u0000\u0349\u034a"+ + "\u0006V\u000f\u0000\u034a\u00b9\u0001\u0000\u0000\u0000\u034b\u034c\u0003"+ + "d,\u0000\u034c\u034d\u0001\u0000\u0000\u0000\u034d\u034e\u0006W\u0010"+ + "\u0000\u034e\u00bb\u0001\u0000\u0000\u0000\u034f\u0350\u0005m\u0000\u0000"+ + "\u0350\u0351\u0005e\u0000\u0000\u0351\u0352\u0005t\u0000\u0000\u0352\u0353"+ + "\u0005a\u0000\u0000\u0353\u0354\u0005d\u0000\u0000\u0354\u0355\u0005a"+ + "\u0000\u0000\u0355\u0356\u0005t\u0000\u0000\u0356\u0357\u0005a\u0000\u0000"+ + "\u0357\u00bd\u0001\u0000\u0000\u0000\u0358\u035c\b\n\u0000\u0000\u0359"+ + "\u035a\u0005/\u0000\u0000\u035a\u035c\b\u000b\u0000\u0000\u035b\u0358"+ + "\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000\u0000\u035c\u00bf"+ + "\u0001\u0000\u0000\u0000\u035d\u035f\u0003\u00beY\u0000\u035e\u035d\u0001"+ + "\u0000\u0000\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u035e\u0001"+ + "\u0000\u0000\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u00c1\u0001"+ + "\u0000\u0000\u0000\u0362\u0363\u0003\u00aaO\u0000\u0363\u0364\u0001\u0000"+ + 
"\u0000\u0000\u0364\u0365\u0006[\u0011\u0000\u0365\u00c3\u0001\u0000\u0000"+ + "\u0000\u0366\u0367\u00032\u0013\u0000\u0367\u0368\u0001\u0000\u0000\u0000"+ + "\u0368\u0369\u0006\\\t\u0000\u0369\u00c5\u0001\u0000\u0000\u0000\u036a"+ + "\u036b\u00034\u0014\u0000\u036b\u036c\u0001\u0000\u0000\u0000\u036c\u036d"+ + "\u0006]\t\u0000\u036d\u00c7\u0001\u0000\u0000\u0000\u036e\u036f\u0003"+ + "6\u0015\u0000\u036f\u0370\u0001\u0000\u0000\u0000\u0370\u0371\u0006^\t"+ + "\u0000\u0371\u00c9\u0001\u0000\u0000\u0000\u0372\u0373\u0003B\u001b\u0000"+ + "\u0373\u0374\u0001\u0000\u0000\u0000\u0374\u0375\u0006_\f\u0000\u0375"+ + "\u0376\u0006_\r\u0000\u0376\u00cb\u0001\u0000\u0000\u0000\u0377\u0378"+ + "\u0003j/\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a\u0006`"+ + "\u0012\u0000\u037a\u00cd\u0001\u0000\u0000\u0000\u037b\u037c\u0003f-\u0000"+ + "\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006a\u000f\u0000\u037e"+ + "\u00cf\u0001\u0000\u0000\u0000\u037f\u0384\u0003F\u001d\u0000\u0380\u0384"+ + "\u0003D\u001c\u0000\u0381\u0384\u0003T$\u0000\u0382\u0384\u0003\u009c"+ + "H\u0000\u0383\u037f\u0001\u0000\u0000\u0000\u0383\u0380\u0001\u0000\u0000"+ + "\u0000\u0383\u0381\u0001\u0000\u0000\u0000\u0383\u0382\u0001\u0000\u0000"+ + "\u0000\u0384\u00d1\u0001\u0000\u0000\u0000\u0385\u0388\u0003F\u001d\u0000"+ + "\u0386\u0388\u0003\u009cH\u0000\u0387\u0385\u0001\u0000\u0000\u0000\u0387"+ + "\u0386\u0001\u0000\u0000\u0000\u0388\u038c\u0001\u0000\u0000\u0000\u0389"+ + "\u038b\u0003\u00d0b\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038e"+ "\u0001\u0000\u0000\u0000\u038c\u038a\u0001\u0000\u0000\u0000\u038c\u038d"+ - "\u0001\u0000\u0000\u0000\u038d\u00d2\u0001\u0000\u0000\u0000\u038e\u038f"+ - "\u0003/\u0012\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006"+ - "d\b\u0000\u0391\u00d4\u0001\u0000\u0000\u0000\u0392\u0393\u00031\u0013"+ - "\u0000\u0393\u0394\u0001\u0000\u0000\u0000\u0394\u0395\u0006e\b\u0000"+ - "\u0395\u00d6\u0001\u0000\u0000\u0000\u0396\u0397\u00033\u0014\u0000\u0397"+ - "\u0398\u0001\u0000\u0000\u0000\u0398\u0399\u0006f\b\u0000\u0399\u00d8"+ - "\u0001\u0000\u0000\u0000\u039a\u039b\u0003?\u001a\u0000\u039b\u039c\u0001"+ - "\u0000\u0000\u0000\u039c\u039d\u0006g\u000b\u0000\u039d\u039e\u0006g\f"+ - "\u0000\u039e\u00da\u0001\u0000\u0000\u0000\u039f\u03a0\u0003a+\u0000\u03a0"+ - "\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006h\u000f\u0000\u03a2\u00dc"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a4\u0003c,\u0000\u03a4\u03a5\u0001\u0000"+ - "\u0000\u0000\u03a5\u03a6\u0006i\u000e\u0000\u03a6\u00de\u0001\u0000\u0000"+ - "\u0000\u03a7\u03a8\u0003g.\u0000\u03a8\u03a9\u0001\u0000\u0000\u0000\u03a9"+ - "\u03aa\u0006j\u0011\u0000\u03aa\u00e0\u0001\u0000\u0000\u0000\u03ab\u03ac"+ - "\u0005a\u0000\u0000\u03ac\u03ad\u0005s\u0000\u0000\u03ad\u00e2\u0001\u0000"+ - "\u0000\u0000\u03ae\u03af\u0003\u00d1c\u0000\u03af\u03b0\u0001\u0000\u0000"+ - "\u0000\u03b0\u03b1\u0006l\u0012\u0000\u03b1\u00e4\u0001\u0000\u0000\u0000"+ - "\u03b2\u03b3\u0003/\u0012\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4"+ - "\u03b5\u0006m\b\u0000\u03b5\u00e6\u0001\u0000\u0000\u0000\u03b6\u03b7"+ - "\u00031\u0013\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006"+ - "n\b\u0000\u03b9\u00e8\u0001\u0000\u0000\u0000\u03ba\u03bb\u00033\u0014"+ - "\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006o\b\u0000"+ - "\u03bd\u00ea\u0001\u0000\u0000\u0000\u03be\u03bf\u0003?\u001a\u0000\u03bf"+ - "\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006p\u000b\u0000\u03c1\u03c2"+ - 
"\u0006p\f\u0000\u03c2\u00ec\u0001\u0000\u0000\u0000\u03c3\u03c4\u0003"+ - "\u009fJ\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006q\t"+ - "\u0000\u03c6\u03c7\u0006q\u0013\u0000\u03c7\u00ee\u0001\u0000\u0000\u0000"+ - "\u03c8\u03c9\u0005o\u0000\u0000\u03c9\u03ca\u0005n\u0000\u0000\u03ca\u03cb"+ - "\u0001\u0000\u0000\u0000\u03cb\u03cc\u0006r\u0014\u0000\u03cc\u00f0\u0001"+ - "\u0000\u0000\u0000\u03cd\u03ce\u0005w\u0000\u0000\u03ce\u03cf\u0005i\u0000"+ - "\u0000\u03cf\u03d0\u0005t\u0000\u0000\u03d0\u03d1\u0005h\u0000\u0000\u03d1"+ - "\u03d2\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006s\u0014\u0000\u03d3\u00f2"+ - "\u0001\u0000\u0000\u0000\u03d4\u03d5\b\f\u0000\u0000\u03d5\u00f4\u0001"+ - "\u0000\u0000\u0000\u03d6\u03d8\u0003\u00f3t\u0000\u03d7\u03d6\u0001\u0000"+ - "\u0000\u0000\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9\u03d7\u0001\u0000"+ - "\u0000\u0000\u03d9\u03da\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ - "\u0000\u0000\u03db\u03dc\u0003\u0131\u0093\u0000\u03dc\u03de\u0001\u0000"+ - "\u0000\u0000\u03dd\u03d7\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000"+ - "\u0000\u0000\u03de\u03e0\u0001\u0000\u0000\u0000\u03df\u03e1\u0003\u00f3"+ - "t\u0000\u03e0\u03df\u0001\u0000\u0000\u0000\u03e1\u03e2\u0001\u0000\u0000"+ - "\u0000\u03e2\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - "\u0000\u03e3\u00f6\u0001\u0000\u0000\u0000\u03e4\u03e5\u0003\u00a7N\u0000"+ - "\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e7\u0006v\u0010\u0000\u03e7"+ - "\u00f8\u0001\u0000\u0000\u0000\u03e8\u03e9\u0003\u00f5u\u0000\u03e9\u03ea"+ - "\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006w\u0015\u0000\u03eb\u00fa\u0001"+ - "\u0000\u0000\u0000\u03ec\u03ed\u0003/\u0012\u0000\u03ed\u03ee\u0001\u0000"+ - "\u0000\u0000\u03ee\u03ef\u0006x\b\u0000\u03ef\u00fc\u0001\u0000\u0000"+ - "\u0000\u03f0\u03f1\u00031\u0013\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000"+ - "\u03f2\u03f3\u0006y\b\u0000\u03f3\u00fe\u0001\u0000\u0000\u0000\u03f4"+ - "\u03f5\u00033\u0014\u0000\u03f5\u03f6\u0001\u0000\u0000\u0000\u03f6\u03f7"+ - "\u0006z\b\u0000\u03f7\u0100\u0001\u0000\u0000\u0000\u03f8\u03f9\u0003"+ - "?\u001a\u0000\u03f9\u03fa\u0001\u0000\u0000\u0000\u03fa\u03fb\u0006{\u000b"+ - "\u0000\u03fb\u03fc\u0006{\f\u0000\u03fc\u03fd\u0006{\f\u0000\u03fd\u0102"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003a+\u0000\u03ff\u0400\u0001\u0000"+ - "\u0000\u0000\u0400\u0401\u0006|\u000f\u0000\u0401\u0104\u0001\u0000\u0000"+ - "\u0000\u0402\u0403\u0003c,\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ - "\u0405\u0006}\u000e\u0000\u0405\u0106\u0001\u0000\u0000\u0000\u0406\u0407"+ - "\u0003g.\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006~"+ - "\u0011\u0000\u0409\u0108\u0001\u0000\u0000\u0000\u040a\u040b\u0003\u00f1"+ - "s\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\u007f\u0016"+ - "\u0000\u040d\u010a\u0001\u0000\u0000\u0000\u040e\u040f\u0003\u00d1c\u0000"+ - "\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006\u0080\u0012\u0000"+ - "\u0411\u010c\u0001\u0000\u0000\u0000\u0412\u0413\u0003\u00a7N\u0000\u0413"+ - "\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006\u0081\u0010\u0000\u0415"+ - "\u010e\u0001\u0000\u0000\u0000\u0416\u0417\u0003/\u0012\u0000\u0417\u0418"+ - "\u0001\u0000\u0000\u0000\u0418\u0419\u0006\u0082\b\u0000\u0419\u0110\u0001"+ - "\u0000\u0000\u0000\u041a\u041b\u00031\u0013\u0000\u041b\u041c\u0001\u0000"+ - "\u0000\u0000\u041c\u041d\u0006\u0083\b\u0000\u041d\u0112\u0001\u0000\u0000"+ - "\u0000\u041e\u041f\u00033\u0014\u0000\u041f\u0420\u0001\u0000\u0000\u0000"+ - 
"\u0420\u0421\u0006\u0084\b\u0000\u0421\u0114\u0001\u0000\u0000\u0000\u0422"+ - "\u0423\u0003?\u001a\u0000\u0423\u0424\u0001\u0000\u0000\u0000\u0424\u0425"+ - "\u0006\u0085\u000b\u0000\u0425\u0426\u0006\u0085\f\u0000\u0426\u0116\u0001"+ - "\u0000\u0000\u0000\u0427\u0428\u0003g.\u0000\u0428\u0429\u0001\u0000\u0000"+ - "\u0000\u0429\u042a\u0006\u0086\u0011\u0000\u042a\u0118\u0001\u0000\u0000"+ - "\u0000\u042b\u042c\u0003\u00a7N\u0000\u042c\u042d\u0001\u0000\u0000\u0000"+ - "\u042d\u042e\u0006\u0087\u0010\u0000\u042e\u011a\u0001\u0000\u0000\u0000"+ - "\u042f\u0430\u0003\u00a3L\u0000\u0430\u0431\u0001\u0000\u0000\u0000\u0431"+ - "\u0432\u0006\u0088\u0017\u0000\u0432\u011c\u0001\u0000\u0000\u0000\u0433"+ - "\u0434\u0003/\u0012\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435\u0436"+ - "\u0006\u0089\b\u0000\u0436\u011e\u0001\u0000\u0000\u0000\u0437\u0438\u0003"+ - "1\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006\u008a"+ - "\b\u0000\u043a\u0120\u0001\u0000\u0000\u0000\u043b\u043c\u00033\u0014"+ - "\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006\u008b\b\u0000"+ - "\u043e\u0122\u0001\u0000\u0000\u0000\u043f\u0440\u0003?\u001a\u0000\u0440"+ - "\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u008c\u000b\u0000\u0442"+ - "\u0443\u0006\u008c\f\u0000\u0443\u0124\u0001\u0000\u0000\u0000\u0444\u0445"+ - "\u0005i\u0000\u0000\u0445\u0446\u0005n\u0000\u0000\u0446\u0447\u0005f"+ - "\u0000\u0000\u0447\u0448\u0005o\u0000\u0000\u0448\u0126\u0001\u0000\u0000"+ - "\u0000\u0449\u044a\u0005f\u0000\u0000\u044a\u044b\u0005u\u0000\u0000\u044b"+ - "\u044c\u0005n\u0000\u0000\u044c\u044d\u0005c\u0000\u0000\u044d\u044e\u0005"+ - "t\u0000\u0000\u044e\u044f\u0005i\u0000\u0000\u044f\u0450\u0005o\u0000"+ - "\u0000\u0450\u0451\u0005n\u0000\u0000\u0451\u0452\u0005s\u0000\u0000\u0452"+ - "\u0128\u0001\u0000\u0000\u0000\u0453\u0454\u0003/\u0012\u0000\u0454\u0455"+ - "\u0001\u0000\u0000\u0000\u0455\u0456\u0006\u008f\b\u0000\u0456\u012a\u0001"+ - "\u0000\u0000\u0000\u0457\u0458\u00031\u0013\u0000\u0458\u0459\u0001\u0000"+ - "\u0000\u0000\u0459\u045a\u0006\u0090\b\u0000\u045a\u012c\u0001\u0000\u0000"+ - "\u0000\u045b\u045c\u00033\u0014\u0000\u045c\u045d\u0001\u0000\u0000\u0000"+ - "\u045d\u045e\u0006\u0091\b\u0000\u045e\u012e\u0001\u0000\u0000\u0000\u045f"+ - "\u0460\u0003\u00a1K\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462"+ - "\u0006\u0092\r\u0000\u0462\u0463\u0006\u0092\f\u0000\u0463\u0130\u0001"+ - "\u0000\u0000\u0000\u0464\u0465\u0005:\u0000\u0000\u0465\u0132\u0001\u0000"+ - "\u0000\u0000\u0466\u046c\u0003K \u0000\u0467\u046c\u0003A\u001b\u0000"+ - "\u0468\u046c\u0003g.\u0000\u0469\u046c\u0003C\u001c\u0000\u046a\u046c"+ - "\u0003Q#\u0000\u046b\u0466\u0001\u0000\u0000\u0000\u046b\u0467\u0001\u0000"+ - "\u0000\u0000\u046b\u0468\u0001\u0000\u0000\u0000\u046b\u0469\u0001\u0000"+ - "\u0000\u0000\u046b\u046a\u0001\u0000\u0000\u0000\u046c\u046d\u0001\u0000"+ - "\u0000\u0000\u046d\u046b\u0001\u0000\u0000\u0000\u046d\u046e\u0001\u0000"+ - "\u0000\u0000\u046e\u0134\u0001\u0000\u0000\u0000\u046f\u0470\u0003/\u0012"+ - "\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006\u0095\b\u0000"+ - "\u0472\u0136\u0001\u0000\u0000\u0000\u0473\u0474\u00031\u0013\u0000\u0474"+ - "\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006\u0096\b\u0000\u0476\u0138"+ - "\u0001\u0000\u0000\u0000\u0477\u0478\u00033\u0014\u0000\u0478\u0479\u0001"+ - "\u0000\u0000\u0000\u0479\u047a\u0006\u0097\b\u0000\u047a\u013a\u0001\u0000"+ - "\u0000\u00009\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u01cd"+ - 
"\u01d7\u01db\u01de\u01e7\u01e9\u01f4\u021d\u0222\u022b\u0232\u0237\u0239"+ - "\u0244\u024c\u024f\u0251\u0256\u025b\u0261\u0268\u026d\u0273\u0276\u027e"+ - "\u0282\u0304\u0309\u030e\u0310\u0316\u0349\u034e\u0371\u0375\u037a\u037f"+ - "\u0384\u0386\u038a\u038c\u03d9\u03dd\u03e2\u046b\u046d\u0018\u0005\u0002"+ - "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003"+ - "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000"+ - "\u0007?\u0000\u0005\u0000\u0000\u0007\u0019\u0000\u0004\u0000\u0000\u0007"+ - "@\u0000\u0007!\u0000\u0007 \u0000\u0007B\u0000\u0007#\u0000\u0007K\u0000"+ - "\u0005\n\u0000\u0005\u0007\u0000\u0007U\u0000\u0007T\u0000\u0007A\u0000"; + "\u0001\u0000\u0000\u0000\u038d\u0399\u0001\u0000\u0000\u0000\u038e\u038c"+ + "\u0001\u0000\u0000\u0000\u038f\u0392\u0003T$\u0000\u0390\u0392\u0003N"+ + "!\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391\u0390\u0001\u0000\u0000"+ + "\u0000\u0392\u0394\u0001\u0000\u0000\u0000\u0393\u0395\u0003\u00d0b\u0000"+ + "\u0394\u0393\u0001\u0000\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000"+ + "\u0396\u0394\u0001\u0000\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000"+ + "\u0397\u0399\u0001\u0000\u0000\u0000\u0398\u0387\u0001\u0000\u0000\u0000"+ + "\u0398\u0391\u0001\u0000\u0000\u0000\u0399\u00d3\u0001\u0000\u0000\u0000"+ + "\u039a\u039d\u0003\u00d2c\u0000\u039b\u039d\u0003\u00a8N\u0000\u039c\u039a"+ + "\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d\u039e"+ + "\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000\u0000\u0000\u039e\u039f"+ + "\u0001\u0000\u0000\u0000\u039f\u00d5\u0001\u0000\u0000\u0000\u03a0\u03a1"+ + "\u00032\u0013\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0006"+ + "e\t\u0000\u03a3\u00d7\u0001\u0000\u0000\u0000\u03a4\u03a5\u00034\u0014"+ + "\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006f\t\u0000"+ + "\u03a7\u00d9\u0001\u0000\u0000\u0000\u03a8\u03a9\u00036\u0015\u0000\u03a9"+ + "\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006g\t\u0000\u03ab\u00db"+ + "\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003B\u001b\u0000\u03ad\u03ae\u0001"+ + "\u0000\u0000\u0000\u03ae\u03af\u0006h\f\u0000\u03af\u03b0\u0006h\r\u0000"+ + "\u03b0\u00dd\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003d,\u0000\u03b2\u03b3"+ + "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006i\u0010\u0000\u03b4\u00df\u0001"+ + "\u0000\u0000\u0000\u03b5\u03b6\u0003f-\u0000\u03b6\u03b7\u0001\u0000\u0000"+ + "\u0000\u03b7\u03b8\u0006j\u000f\u0000\u03b8\u00e1\u0001\u0000\u0000\u0000"+ + "\u03b9\u03ba\u0003j/\u0000\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc"+ + "\u0006k\u0012\u0000\u03bc\u00e3\u0001\u0000\u0000\u0000\u03bd\u03be\u0005"+ + "a\u0000\u0000\u03be\u03bf\u0005s\u0000\u0000\u03bf\u00e5\u0001\u0000\u0000"+ + "\u0000\u03c0\u03c1\u0003\u00d4d\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000"+ + "\u03c2\u03c3\u0006m\u0013\u0000\u03c3\u00e7\u0001\u0000\u0000\u0000\u03c4"+ + "\u03c5\u00032\u0013\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7"+ + "\u0006n\t\u0000\u03c7\u00e9\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003"+ + "4\u0014\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006o\t"+ + "\u0000\u03cb\u00eb\u0001\u0000\u0000\u0000\u03cc\u03cd\u00036\u0015\u0000"+ + "\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf\u0006p\t\u0000\u03cf"+ + "\u00ed\u0001\u0000\u0000\u0000\u03d0\u03d1\u0003B\u001b\u0000\u03d1\u03d2"+ + "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006q\f\u0000\u03d3\u03d4\u0006"+ + "q\r\u0000\u03d4\u00ef\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00a2"+ + 
"K\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006r\n\u0000"+ + "\u03d8\u03d9\u0006r\u0014\u0000\u03d9\u00f1\u0001\u0000\u0000\u0000\u03da"+ + "\u03db\u0005o\u0000\u0000\u03db\u03dc\u0005n\u0000\u0000\u03dc\u03dd\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0006s\u0015\u0000\u03de\u00f3\u0001\u0000"+ + "\u0000\u0000\u03df\u03e0\u0005w\u0000\u0000\u03e0\u03e1\u0005i\u0000\u0000"+ + "\u03e1\u03e2\u0005t\u0000\u0000\u03e2\u03e3\u0005h\u0000\u0000\u03e3\u03e4"+ + "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006t\u0015\u0000\u03e5\u00f5\u0001"+ + "\u0000\u0000\u0000\u03e6\u03e7\b\f\u0000\u0000\u03e7\u00f7\u0001\u0000"+ + "\u0000\u0000\u03e8\u03ea\u0003\u00f6u\u0000\u03e9\u03e8\u0001\u0000\u0000"+ + "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03e9\u0001\u0000\u0000"+ + "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0001\u0000\u0000"+ + "\u0000\u03ed\u03ee\u0003\u013c\u0098\u0000\u03ee\u03f0\u0001\u0000\u0000"+ + "\u0000\u03ef\u03e9\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000"+ + "\u0000\u03f0\u03f2\u0001\u0000\u0000\u0000\u03f1\u03f3\u0003\u00f6u\u0000"+ + "\u03f2\u03f1\u0001\u0000\u0000\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000"+ + "\u03f4\u03f2\u0001\u0000\u0000\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000"+ + "\u03f5\u00f9\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003\u00aaO\u0000\u03f7"+ + "\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006w\u0011\u0000\u03f9\u00fb"+ + "\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003\u00f8v\u0000\u03fb\u03fc\u0001"+ + "\u0000\u0000\u0000\u03fc\u03fd\u0006x\u0016\u0000\u03fd\u00fd\u0001\u0000"+ + "\u0000\u0000\u03fe\u03ff\u00032\u0013\u0000\u03ff\u0400\u0001\u0000\u0000"+ + "\u0000\u0400\u0401\u0006y\t\u0000\u0401\u00ff\u0001\u0000\u0000\u0000"+ + "\u0402\u0403\u00034\u0014\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ + "\u0405\u0006z\t\u0000\u0405\u0101\u0001\u0000\u0000\u0000\u0406\u0407"+ + "\u00036\u0015\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006"+ + "{\t\u0000\u0409\u0103\u0001\u0000\u0000\u0000\u040a\u040b\u0003B\u001b"+ + "\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006|\f\u0000"+ + "\u040d\u040e\u0006|\r\u0000\u040e\u040f\u0006|\r\u0000\u040f\u0105\u0001"+ + "\u0000\u0000\u0000\u0410\u0411\u0003d,\u0000\u0411\u0412\u0001\u0000\u0000"+ + "\u0000\u0412\u0413\u0006}\u0010\u0000\u0413\u0107\u0001\u0000\u0000\u0000"+ + "\u0414\u0415\u0003f-\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417"+ + "\u0006~\u000f\u0000\u0417\u0109\u0001\u0000\u0000\u0000\u0418\u0419\u0003"+ + "j/\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u007f\u0012"+ + "\u0000\u041b\u010b\u0001\u0000\u0000\u0000\u041c\u041d\u0003\u00f4t\u0000"+ + "\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041f\u0006\u0080\u0017\u0000"+ + "\u041f\u010d\u0001\u0000\u0000\u0000\u0420\u0421\u0003\u00d4d\u0000\u0421"+ + "\u0422\u0001\u0000\u0000\u0000\u0422\u0423\u0006\u0081\u0013\u0000\u0423"+ + "\u010f\u0001\u0000\u0000\u0000\u0424\u0425\u0003\u00aaO\u0000\u0425\u0426"+ + "\u0001\u0000\u0000\u0000\u0426\u0427\u0006\u0082\u0011\u0000\u0427\u0111"+ + "\u0001\u0000\u0000\u0000\u0428\u0429\u00032\u0013\u0000\u0429\u042a\u0001"+ + "\u0000\u0000\u0000\u042a\u042b\u0006\u0083\t\u0000\u042b\u0113\u0001\u0000"+ + "\u0000\u0000\u042c\u042d\u00034\u0014\u0000\u042d\u042e\u0001\u0000\u0000"+ + "\u0000\u042e\u042f\u0006\u0084\t\u0000\u042f\u0115\u0001\u0000\u0000\u0000"+ + "\u0430\u0431\u00036\u0015\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432"+ + "\u0433\u0006\u0085\t\u0000\u0433\u0117\u0001\u0000\u0000\u0000\u0434\u0435"+ + 
"\u0003B\u001b\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ + "\u0086\f\u0000\u0437\u0438\u0006\u0086\r\u0000\u0438\u0119\u0001\u0000"+ + "\u0000\u0000\u0439\u043a\u0003j/\u0000\u043a\u043b\u0001\u0000\u0000\u0000"+ + "\u043b\u043c\u0006\u0087\u0012\u0000\u043c\u011b\u0001\u0000\u0000\u0000"+ + "\u043d\u043e\u0003\u00aaO\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f"+ + "\u0440\u0006\u0088\u0011\u0000\u0440\u011d\u0001\u0000\u0000\u0000\u0441"+ + "\u0442\u0003\u00a6M\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444"+ + "\u0006\u0089\u0018\u0000\u0444\u011f\u0001\u0000\u0000\u0000\u0445\u0446"+ + "\u00032\u0013\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ + "\u008a\t\u0000\u0448\u0121\u0001\u0000\u0000\u0000\u0449\u044a\u00034"+ + "\u0014\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u008b"+ + "\t\u0000\u044c\u0123\u0001\u0000\u0000\u0000\u044d\u044e\u00036\u0015"+ + "\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450\u0006\u008c\t\u0000"+ + "\u0450\u0125\u0001\u0000\u0000\u0000\u0451\u0452\u0003B\u001b\u0000\u0452"+ + "\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u008d\f\u0000\u0454\u0455"+ + "\u0006\u008d\r\u0000\u0455\u0127\u0001\u0000\u0000\u0000\u0456\u0457\u0005"+ + "i\u0000\u0000\u0457\u0458\u0005n\u0000\u0000\u0458\u0459\u0005f\u0000"+ + "\u0000\u0459\u045a\u0005o\u0000\u0000\u045a\u0129\u0001\u0000\u0000\u0000"+ + "\u045b\u045c\u00032\u0013\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d"+ + "\u045e\u0006\u008f\t\u0000\u045e\u012b\u0001\u0000\u0000\u0000\u045f\u0460"+ + "\u00034\u0014\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462\u0006"+ + "\u0090\t\u0000\u0462\u012d\u0001\u0000\u0000\u0000\u0463\u0464\u00036"+ + "\u0015\u0000\u0464\u0465\u0001\u0000\u0000\u0000\u0465\u0466\u0006\u0091"+ + "\t\u0000\u0466\u012f\u0001\u0000\u0000\u0000\u0467\u0468\u0003B\u001b"+ + "\u0000\u0468\u0469\u0001\u0000\u0000\u0000\u0469\u046a\u0006\u0092\f\u0000"+ + "\u046a\u046b\u0006\u0092\r\u0000\u046b\u0131\u0001\u0000\u0000\u0000\u046c"+ + "\u046d\u0005f\u0000\u0000\u046d\u046e\u0005u\u0000\u0000\u046e\u046f\u0005"+ + "n\u0000\u0000\u046f\u0470\u0005c\u0000\u0000\u0470\u0471\u0005t\u0000"+ + "\u0000\u0471\u0472\u0005i\u0000\u0000\u0472\u0473\u0005o\u0000\u0000\u0473"+ + "\u0474\u0005n\u0000\u0000\u0474\u0475\u0005s\u0000\u0000\u0475\u0133\u0001"+ + "\u0000\u0000\u0000\u0476\u0477\u00032\u0013\u0000\u0477\u0478\u0001\u0000"+ + "\u0000\u0000\u0478\u0479\u0006\u0094\t\u0000\u0479\u0135\u0001\u0000\u0000"+ + "\u0000\u047a\u047b\u00034\u0014\u0000\u047b\u047c\u0001\u0000\u0000\u0000"+ + "\u047c\u047d\u0006\u0095\t\u0000\u047d\u0137\u0001\u0000\u0000\u0000\u047e"+ + "\u047f\u00036\u0015\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481"+ + "\u0006\u0096\t\u0000\u0481\u0139\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ + "\u00a4L\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u0097"+ + "\u000e\u0000\u0485\u0486\u0006\u0097\r\u0000\u0486\u013b\u0001\u0000\u0000"+ + "\u0000\u0487\u0488\u0005:\u0000\u0000\u0488\u013d\u0001\u0000\u0000\u0000"+ + "\u0489\u048f\u0003N!\u0000\u048a\u048f\u0003D\u001c\u0000\u048b\u048f"+ + "\u0003j/\u0000\u048c\u048f\u0003F\u001d\u0000\u048d\u048f\u0003T$\u0000"+ + "\u048e\u0489\u0001\u0000\u0000\u0000\u048e\u048a\u0001\u0000\u0000\u0000"+ + "\u048e\u048b\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000"+ + "\u048e\u048d\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000"+ + "\u0490\u048e\u0001\u0000\u0000\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + 
"\u0491\u013f\u0001\u0000\u0000\u0000\u0492\u0493\u00032\u0013\u0000\u0493"+ + "\u0494\u0001\u0000\u0000\u0000\u0494\u0495\u0006\u009a\t\u0000\u0495\u0141"+ + "\u0001\u0000\u0000\u0000\u0496\u0497\u00034\u0014\u0000\u0497\u0498\u0001"+ + "\u0000\u0000\u0000\u0498\u0499\u0006\u009b\t\u0000\u0499\u0143\u0001\u0000"+ + "\u0000\u0000\u049a\u049b\u00036\u0015\u0000\u049b\u049c\u0001\u0000\u0000"+ + "\u0000\u049c\u049d\u0006\u009c\t\u0000\u049d\u0145\u0001\u0000\u0000\u0000"+ + ":\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01df\u01e9"+ + "\u01ed\u01f0\u01f9\u01fb\u0206\u022f\u0234\u023d\u0244\u0249\u024b\u0256"+ + "\u025e\u0261\u0263\u0268\u026d\u0273\u027a\u027f\u0285\u0288\u0290\u0294"+ + "\u0316\u031b\u0320\u0322\u0328\u035b\u0360\u0383\u0387\u038c\u0391\u0396"+ + "\u0398\u039c\u039e\u03eb\u03ef\u03f4\u048e\u0490\u0019\u0005\u0002\u0000"+ + "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ + "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ + "\u0000\u0007@\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ + "\u0007A\u0000\u0007\"\u0000\u0007!\u0000\u0007C\u0000\u0007$\u0000\u0007"+ + "L\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007V\u0000\u0007U\u0000"+ + "\u0007B\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a75449a305d3f..b8c5f609e75e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -10,6 +10,7 @@ null 'inlinestats' 'keep' 'limit' +'meta' 'mv_expand' 'rename' 'row' @@ -95,6 +96,9 @@ null null null 'info' +null +null +null 'functions' null null @@ -117,6 +121,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -202,10 +207,13 @@ MVEXPAND_LINE_COMMENT MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +FUNCTIONS +META_LINE_COMMENT +META_MULTILINE_COMMENT +META_WS COLON SETTING SETTING_LINE_COMMENT @@ -261,9 +269,10 @@ comparisonOperator explainCommand subqueryExpression showCommand +metaCommand enrichCommand enrichWithClause atn: -[4, 1, 104, 507, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 110, 8, 1, 10, 1, 12, 1, 113, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 119, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 134, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 146, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 153, 8, 5, 10, 5, 12, 5, 156, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 163, 
8, 5, 1, 5, 1, 5, 3, 5, 167, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 175, 8, 5, 10, 5, 12, 5, 178, 9, 5, 1, 6, 1, 6, 3, 6, 182, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 201, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 207, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 215, 8, 8, 10, 8, 12, 8, 218, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 227, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 235, 8, 10, 10, 10, 12, 10, 238, 9, 10, 3, 10, 240, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 250, 8, 12, 10, 12, 12, 12, 253, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 260, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 266, 8, 14, 10, 14, 12, 14, 269, 9, 14, 1, 14, 3, 14, 272, 8, 14, 1, 15, 1, 15, 3, 15, 276, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 282, 8, 16, 10, 16, 12, 16, 285, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 296, 8, 19, 1, 19, 1, 19, 3, 19, 300, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 306, 8, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 5, 22, 313, 8, 22, 10, 22, 12, 22, 316, 9, 22, 1, 23, 1, 23, 1, 23, 5, 23, 321, 8, 23, 10, 23, 12, 23, 324, 9, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 343, 8, 26, 10, 26, 12, 26, 346, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 354, 8, 26, 10, 26, 12, 26, 357, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 365, 8, 26, 10, 26, 12, 26, 368, 9, 26, 1, 26, 1, 26, 3, 26, 372, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 381, 8, 28, 10, 28, 12, 28, 384, 9, 28, 1, 29, 1, 29, 3, 29, 388, 8, 29, 1, 29, 1, 29, 3, 29, 392, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 398, 8, 30, 10, 30, 12, 30, 401, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 407, 8, 31, 10, 31, 12, 31, 410, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 416, 8, 32, 10, 32, 12, 32, 419, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 429, 8, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 5, 37, 441, 8, 37, 10, 37, 12, 37, 444, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 3, 40, 454, 8, 40, 1, 41, 3, 41, 457, 8, 41, 1, 41, 1, 41, 1, 42, 3, 42, 462, 8, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 481, 8, 47, 1, 48, 1, 48, 1, 48, 1, 48, 3, 48, 487, 8, 48, 1, 48, 1, 48, 1, 48, 1, 48, 5, 48, 493, 8, 48, 10, 48, 12, 48, 496, 9, 48, 3, 48, 498, 8, 48, 1, 49, 1, 49, 1, 49, 3, 49, 503, 8, 49, 1, 49, 1, 49, 1, 49, 0, 3, 2, 10, 16, 50, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 31, 31, 34, 34, 1, 0, 37, 38, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 533, 0, 100, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 4, 118, 1, 0, 0, 0, 6, 133, 1, 0, 0, 0, 8, 135, 1, 0, 0, 0, 10, 166, 1, 0, 0, 0, 12, 193, 1, 0, 0, 0, 14, 200, 1, 0, 0, 0, 16, 206, 1, 0, 0, 0, 18, 226, 1, 0, 0, 0, 20, 228, 1, 0, 0, 0, 22, 243, 1, 0, 0, 0, 24, 246, 1, 0, 0, 0, 26, 259, 1, 0, 0, 0, 28, 261, 1, 0, 0, 0, 30, 275, 1, 0, 0, 0, 32, 277, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 290, 1, 0, 0, 0, 38, 293, 1, 0, 0, 0, 40, 301, 1, 0, 0, 0, 42, 307, 1, 0, 0, 0, 44, 309, 1, 0, 0, 0, 46, 317, 1, 0, 0, 0, 48, 325, 1, 
0, 0, 0, 50, 327, 1, 0, 0, 0, 52, 371, 1, 0, 0, 0, 54, 373, 1, 0, 0, 0, 56, 376, 1, 0, 0, 0, 58, 385, 1, 0, 0, 0, 60, 393, 1, 0, 0, 0, 62, 402, 1, 0, 0, 0, 64, 411, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 424, 1, 0, 0, 0, 70, 430, 1, 0, 0, 0, 72, 434, 1, 0, 0, 0, 74, 437, 1, 0, 0, 0, 76, 445, 1, 0, 0, 0, 78, 449, 1, 0, 0, 0, 80, 453, 1, 0, 0, 0, 82, 456, 1, 0, 0, 0, 84, 461, 1, 0, 0, 0, 86, 465, 1, 0, 0, 0, 88, 467, 1, 0, 0, 0, 90, 469, 1, 0, 0, 0, 92, 472, 1, 0, 0, 0, 94, 480, 1, 0, 0, 0, 96, 482, 1, 0, 0, 0, 98, 502, 1, 0, 0, 0, 100, 101, 3, 2, 1, 0, 101, 102, 5, 0, 0, 1, 102, 1, 1, 0, 0, 0, 103, 104, 6, 1, -1, 0, 104, 105, 3, 4, 2, 0, 105, 111, 1, 0, 0, 0, 106, 107, 10, 1, 0, 0, 107, 108, 5, 25, 0, 0, 108, 110, 3, 6, 3, 0, 109, 106, 1, 0, 0, 0, 110, 113, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 3, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 114, 119, 3, 90, 45, 0, 115, 119, 3, 28, 14, 0, 116, 119, 3, 22, 11, 0, 117, 119, 3, 94, 47, 0, 118, 114, 1, 0, 0, 0, 118, 115, 1, 0, 0, 0, 118, 116, 1, 0, 0, 0, 118, 117, 1, 0, 0, 0, 119, 5, 1, 0, 0, 0, 120, 134, 3, 36, 18, 0, 121, 134, 3, 40, 20, 0, 122, 134, 3, 54, 27, 0, 123, 134, 3, 60, 30, 0, 124, 134, 3, 56, 28, 0, 125, 134, 3, 38, 19, 0, 126, 134, 3, 8, 4, 0, 127, 134, 3, 62, 31, 0, 128, 134, 3, 64, 32, 0, 129, 134, 3, 68, 34, 0, 130, 134, 3, 70, 35, 0, 131, 134, 3, 96, 48, 0, 132, 134, 3, 72, 36, 0, 133, 120, 1, 0, 0, 0, 133, 121, 1, 0, 0, 0, 133, 122, 1, 0, 0, 0, 133, 123, 1, 0, 0, 0, 133, 124, 1, 0, 0, 0, 133, 125, 1, 0, 0, 0, 133, 126, 1, 0, 0, 0, 133, 127, 1, 0, 0, 0, 133, 128, 1, 0, 0, 0, 133, 129, 1, 0, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 7, 1, 0, 0, 0, 135, 136, 5, 17, 0, 0, 136, 137, 3, 10, 5, 0, 137, 9, 1, 0, 0, 0, 138, 139, 6, 5, -1, 0, 139, 140, 5, 43, 0, 0, 140, 167, 3, 10, 5, 7, 141, 167, 3, 14, 7, 0, 142, 167, 3, 12, 6, 0, 143, 145, 3, 14, 7, 0, 144, 146, 5, 43, 0, 0, 145, 144, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 148, 5, 40, 0, 0, 148, 149, 5, 39, 0, 0, 149, 154, 3, 14, 7, 0, 150, 151, 5, 33, 0, 0, 151, 153, 3, 14, 7, 0, 152, 150, 1, 0, 0, 0, 153, 156, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 157, 1, 0, 0, 0, 156, 154, 1, 0, 0, 0, 157, 158, 5, 49, 0, 0, 158, 167, 1, 0, 0, 0, 159, 160, 3, 14, 7, 0, 160, 162, 5, 41, 0, 0, 161, 163, 5, 43, 0, 0, 162, 161, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 165, 5, 44, 0, 0, 165, 167, 1, 0, 0, 0, 166, 138, 1, 0, 0, 0, 166, 141, 1, 0, 0, 0, 166, 142, 1, 0, 0, 0, 166, 143, 1, 0, 0, 0, 166, 159, 1, 0, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 10, 4, 0, 0, 169, 170, 5, 30, 0, 0, 170, 175, 3, 10, 5, 5, 171, 172, 10, 3, 0, 0, 172, 173, 5, 46, 0, 0, 173, 175, 3, 10, 5, 4, 174, 168, 1, 0, 0, 0, 174, 171, 1, 0, 0, 0, 175, 178, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 11, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 179, 181, 3, 14, 7, 0, 180, 182, 5, 43, 0, 0, 181, 180, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 5, 42, 0, 0, 184, 185, 3, 86, 43, 0, 185, 194, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 43, 0, 0, 188, 187, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 48, 0, 0, 191, 192, 3, 86, 43, 0, 192, 194, 1, 0, 0, 0, 193, 179, 1, 0, 0, 0, 193, 186, 1, 0, 0, 0, 194, 13, 1, 0, 0, 0, 195, 201, 3, 16, 8, 0, 196, 197, 3, 16, 8, 0, 197, 198, 3, 88, 44, 0, 198, 199, 3, 16, 8, 0, 199, 201, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 200, 196, 1, 0, 0, 0, 201, 15, 1, 0, 0, 0, 202, 203, 6, 8, -1, 0, 203, 207, 3, 18, 9, 0, 204, 205, 7, 0, 0, 0, 205, 
207, 3, 16, 8, 3, 206, 202, 1, 0, 0, 0, 206, 204, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 2, 0, 0, 209, 210, 7, 1, 0, 0, 210, 215, 3, 16, 8, 3, 211, 212, 10, 1, 0, 0, 212, 213, 7, 0, 0, 0, 213, 215, 3, 16, 8, 2, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 17, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 227, 3, 52, 26, 0, 220, 227, 3, 44, 22, 0, 221, 227, 3, 20, 10, 0, 222, 223, 5, 39, 0, 0, 223, 224, 3, 10, 5, 0, 224, 225, 5, 49, 0, 0, 225, 227, 1, 0, 0, 0, 226, 219, 1, 0, 0, 0, 226, 220, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 226, 222, 1, 0, 0, 0, 227, 19, 1, 0, 0, 0, 228, 229, 3, 48, 24, 0, 229, 239, 5, 39, 0, 0, 230, 240, 5, 60, 0, 0, 231, 236, 3, 10, 5, 0, 232, 233, 5, 33, 0, 0, 233, 235, 3, 10, 5, 0, 234, 232, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 230, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, 242, 5, 49, 0, 0, 242, 21, 1, 0, 0, 0, 243, 244, 5, 13, 0, 0, 244, 245, 3, 24, 12, 0, 245, 23, 1, 0, 0, 0, 246, 251, 3, 26, 13, 0, 247, 248, 5, 33, 0, 0, 248, 250, 3, 26, 13, 0, 249, 247, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 25, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 260, 3, 10, 5, 0, 255, 256, 3, 44, 22, 0, 256, 257, 5, 32, 0, 0, 257, 258, 3, 10, 5, 0, 258, 260, 1, 0, 0, 0, 259, 254, 1, 0, 0, 0, 259, 255, 1, 0, 0, 0, 260, 27, 1, 0, 0, 0, 261, 262, 5, 6, 0, 0, 262, 267, 3, 42, 21, 0, 263, 264, 5, 33, 0, 0, 264, 266, 3, 42, 21, 0, 265, 263, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 270, 272, 3, 30, 15, 0, 271, 270, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 29, 1, 0, 0, 0, 273, 276, 3, 32, 16, 0, 274, 276, 3, 34, 17, 0, 275, 273, 1, 0, 0, 0, 275, 274, 1, 0, 0, 0, 276, 31, 1, 0, 0, 0, 277, 278, 5, 70, 0, 0, 278, 283, 3, 42, 21, 0, 279, 280, 5, 33, 0, 0, 280, 282, 3, 42, 21, 0, 281, 279, 1, 0, 0, 0, 282, 285, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 33, 1, 0, 0, 0, 285, 283, 1, 0, 0, 0, 286, 287, 5, 63, 0, 0, 287, 288, 3, 32, 16, 0, 288, 289, 5, 64, 0, 0, 289, 35, 1, 0, 0, 0, 290, 291, 5, 4, 0, 0, 291, 292, 3, 24, 12, 0, 292, 37, 1, 0, 0, 0, 293, 295, 5, 16, 0, 0, 294, 296, 3, 24, 12, 0, 295, 294, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 298, 5, 29, 0, 0, 298, 300, 3, 24, 12, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 39, 1, 0, 0, 0, 301, 302, 5, 8, 0, 0, 302, 305, 3, 24, 12, 0, 303, 304, 5, 29, 0, 0, 304, 306, 3, 24, 12, 0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 41, 1, 0, 0, 0, 307, 308, 7, 2, 0, 0, 308, 43, 1, 0, 0, 0, 309, 314, 3, 48, 24, 0, 310, 311, 5, 35, 0, 0, 311, 313, 3, 48, 24, 0, 312, 310, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 45, 1, 0, 0, 0, 316, 314, 1, 0, 0, 0, 317, 322, 3, 50, 25, 0, 318, 319, 5, 35, 0, 0, 319, 321, 3, 50, 25, 0, 320, 318, 1, 0, 0, 0, 321, 324, 1, 0, 0, 0, 322, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 47, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 325, 326, 7, 3, 0, 0, 326, 49, 1, 0, 0, 0, 327, 328, 5, 75, 0, 0, 328, 51, 1, 0, 0, 0, 329, 372, 5, 44, 0, 0, 330, 331, 3, 84, 42, 0, 331, 332, 5, 65, 0, 0, 332, 372, 1, 0, 0, 0, 333, 372, 3, 82, 41, 0, 334, 372, 3, 84, 42, 0, 335, 372, 3, 78, 39, 0, 336, 372, 5, 47, 0, 0, 337, 372, 3, 86, 43, 0, 338, 339, 5, 63, 0, 0, 339, 344, 3, 80, 40, 0, 340, 341, 5, 33, 0, 0, 341, 343, 3, 80, 40, 0, 342, 340, 1, 0, 0, 0, 
343, 346, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 347, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 64, 0, 0, 348, 372, 1, 0, 0, 0, 349, 350, 5, 63, 0, 0, 350, 355, 3, 78, 39, 0, 351, 352, 5, 33, 0, 0, 352, 354, 3, 78, 39, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 358, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 359, 5, 64, 0, 0, 359, 372, 1, 0, 0, 0, 360, 361, 5, 63, 0, 0, 361, 366, 3, 86, 43, 0, 362, 363, 5, 33, 0, 0, 363, 365, 3, 86, 43, 0, 364, 362, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 369, 1, 0, 0, 0, 368, 366, 1, 0, 0, 0, 369, 370, 5, 64, 0, 0, 370, 372, 1, 0, 0, 0, 371, 329, 1, 0, 0, 0, 371, 330, 1, 0, 0, 0, 371, 333, 1, 0, 0, 0, 371, 334, 1, 0, 0, 0, 371, 335, 1, 0, 0, 0, 371, 336, 1, 0, 0, 0, 371, 337, 1, 0, 0, 0, 371, 338, 1, 0, 0, 0, 371, 349, 1, 0, 0, 0, 371, 360, 1, 0, 0, 0, 372, 53, 1, 0, 0, 0, 373, 374, 5, 10, 0, 0, 374, 375, 5, 27, 0, 0, 375, 55, 1, 0, 0, 0, 376, 377, 5, 15, 0, 0, 377, 382, 3, 58, 29, 0, 378, 379, 5, 33, 0, 0, 379, 381, 3, 58, 29, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 57, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 3, 10, 5, 0, 386, 388, 7, 4, 0, 0, 387, 386, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 390, 5, 45, 0, 0, 390, 392, 7, 5, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 59, 1, 0, 0, 0, 393, 394, 5, 9, 0, 0, 394, 399, 3, 46, 23, 0, 395, 396, 5, 33, 0, 0, 396, 398, 3, 46, 23, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 61, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 2, 0, 0, 403, 408, 3, 46, 23, 0, 404, 405, 5, 33, 0, 0, 405, 407, 3, 46, 23, 0, 406, 404, 1, 0, 0, 0, 407, 410, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 63, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 411, 412, 5, 12, 0, 0, 412, 417, 3, 66, 33, 0, 413, 414, 5, 33, 0, 0, 414, 416, 3, 66, 33, 0, 415, 413, 1, 0, 0, 0, 416, 419, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 417, 418, 1, 0, 0, 0, 418, 65, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 420, 421, 3, 46, 23, 0, 421, 422, 5, 79, 0, 0, 422, 423, 3, 46, 23, 0, 423, 67, 1, 0, 0, 0, 424, 425, 5, 1, 0, 0, 425, 426, 3, 18, 9, 0, 426, 428, 3, 86, 43, 0, 427, 429, 3, 74, 37, 0, 428, 427, 1, 0, 0, 0, 428, 429, 1, 0, 0, 0, 429, 69, 1, 0, 0, 0, 430, 431, 5, 7, 0, 0, 431, 432, 3, 18, 9, 0, 432, 433, 3, 86, 43, 0, 433, 71, 1, 0, 0, 0, 434, 435, 5, 11, 0, 0, 435, 436, 3, 44, 22, 0, 436, 73, 1, 0, 0, 0, 437, 442, 3, 76, 38, 0, 438, 439, 5, 33, 0, 0, 439, 441, 3, 76, 38, 0, 440, 438, 1, 0, 0, 0, 441, 444, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 75, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 445, 446, 3, 48, 24, 0, 446, 447, 5, 32, 0, 0, 447, 448, 3, 52, 26, 0, 448, 77, 1, 0, 0, 0, 449, 450, 7, 6, 0, 0, 450, 79, 1, 0, 0, 0, 451, 454, 3, 82, 41, 0, 452, 454, 3, 84, 42, 0, 453, 451, 1, 0, 0, 0, 453, 452, 1, 0, 0, 0, 454, 81, 1, 0, 0, 0, 455, 457, 7, 0, 0, 0, 456, 455, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 5, 28, 0, 0, 459, 83, 1, 0, 0, 0, 460, 462, 7, 0, 0, 0, 461, 460, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 5, 27, 0, 0, 464, 85, 1, 0, 0, 0, 465, 466, 5, 26, 0, 0, 466, 87, 1, 0, 0, 0, 467, 468, 7, 7, 0, 0, 468, 89, 1, 0, 0, 0, 469, 470, 5, 5, 0, 0, 470, 471, 3, 92, 46, 0, 471, 91, 1, 0, 0, 0, 472, 473, 5, 63, 0, 0, 473, 474, 3, 2, 1, 0, 474, 475, 5, 64, 0, 0, 475, 93, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 481, 5, 95, 0, 0, 478, 479, 5, 14, 0, 
0, 479, 481, 5, 96, 0, 0, 480, 476, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 95, 1, 0, 0, 0, 482, 483, 5, 3, 0, 0, 483, 486, 5, 85, 0, 0, 484, 485, 5, 83, 0, 0, 485, 487, 3, 46, 23, 0, 486, 484, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 497, 1, 0, 0, 0, 488, 489, 5, 84, 0, 0, 489, 494, 3, 98, 49, 0, 490, 491, 5, 33, 0, 0, 491, 493, 3, 98, 49, 0, 492, 490, 1, 0, 0, 0, 493, 496, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 498, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 497, 488, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 97, 1, 0, 0, 0, 499, 500, 3, 46, 23, 0, 500, 501, 5, 32, 0, 0, 501, 503, 1, 0, 0, 0, 502, 499, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 3, 46, 23, 0, 505, 99, 1, 0, 0, 0, 50, 111, 118, 133, 145, 154, 162, 166, 174, 176, 181, 188, 193, 200, 206, 214, 216, 226, 236, 239, 251, 259, 267, 271, 275, 283, 295, 299, 305, 314, 322, 344, 355, 366, 371, 382, 387, 391, 399, 408, 417, 428, 442, 453, 456, 461, 480, 486, 494, 497, 502] \ No newline at end of file +[4, 1, 108, 510, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 112, 8, 1, 10, 1, 12, 1, 115, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 122, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 137, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 149, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 156, 8, 5, 10, 5, 12, 5, 159, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 166, 8, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 6, 1, 6, 3, 6, 185, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 197, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 204, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 210, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 218, 8, 8, 10, 8, 12, 8, 221, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 230, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 238, 8, 10, 10, 10, 12, 10, 241, 9, 10, 3, 10, 243, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 253, 8, 12, 10, 12, 12, 12, 256, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 263, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 269, 8, 14, 10, 14, 12, 14, 272, 9, 14, 1, 14, 3, 14, 275, 8, 14, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 285, 8, 16, 10, 16, 12, 16, 288, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 299, 8, 19, 1, 19, 1, 19, 3, 19, 303, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 309, 8, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 5, 22, 316, 8, 22, 10, 22, 12, 22, 319, 9, 22, 1, 23, 1, 23, 1, 23, 5, 23, 324, 8, 23, 10, 23, 12, 23, 327, 9, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 346, 8, 26, 10, 26, 12, 26, 349, 9, 26, 1, 26, 1, 
26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 384, 8, 28, 10, 28, 12, 28, 387, 9, 28, 1, 29, 1, 29, 3, 29, 391, 8, 29, 1, 29, 1, 29, 3, 29, 395, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 410, 8, 31, 10, 31, 12, 31, 413, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 419, 8, 32, 10, 32, 12, 32, 422, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 432, 8, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 5, 37, 444, 8, 37, 10, 37, 12, 37, 447, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 3, 40, 457, 8, 40, 1, 41, 3, 41, 460, 8, 41, 1, 41, 1, 41, 1, 42, 3, 42, 465, 8, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 490, 8, 49, 1, 49, 1, 49, 1, 49, 1, 49, 5, 49, 496, 8, 49, 10, 49, 12, 49, 499, 9, 49, 3, 49, 501, 8, 49, 1, 50, 1, 50, 1, 50, 3, 50, 506, 8, 50, 1, 50, 1, 50, 1, 50, 0, 3, 2, 10, 16, 51, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 72, 72, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 535, 0, 102, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 4, 121, 1, 0, 0, 0, 6, 136, 1, 0, 0, 0, 8, 138, 1, 0, 0, 0, 10, 169, 1, 0, 0, 0, 12, 196, 1, 0, 0, 0, 14, 203, 1, 0, 0, 0, 16, 209, 1, 0, 0, 0, 18, 229, 1, 0, 0, 0, 20, 231, 1, 0, 0, 0, 22, 246, 1, 0, 0, 0, 24, 249, 1, 0, 0, 0, 26, 262, 1, 0, 0, 0, 28, 264, 1, 0, 0, 0, 30, 278, 1, 0, 0, 0, 32, 280, 1, 0, 0, 0, 34, 289, 1, 0, 0, 0, 36, 293, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 312, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 330, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 379, 1, 0, 0, 0, 58, 388, 1, 0, 0, 0, 60, 396, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 414, 1, 0, 0, 0, 66, 423, 1, 0, 0, 0, 68, 427, 1, 0, 0, 0, 70, 433, 1, 0, 0, 0, 72, 437, 1, 0, 0, 0, 74, 440, 1, 0, 0, 0, 76, 448, 1, 0, 0, 0, 78, 452, 1, 0, 0, 0, 80, 456, 1, 0, 0, 0, 82, 459, 1, 0, 0, 0, 84, 464, 1, 0, 0, 0, 86, 468, 1, 0, 0, 0, 88, 470, 1, 0, 0, 0, 90, 472, 1, 0, 0, 0, 92, 475, 1, 0, 0, 0, 94, 479, 1, 0, 0, 0, 96, 482, 1, 0, 0, 0, 98, 485, 1, 0, 0, 0, 100, 505, 1, 0, 0, 0, 102, 103, 3, 2, 1, 0, 103, 104, 5, 0, 0, 1, 104, 1, 1, 0, 0, 0, 105, 106, 6, 1, -1, 0, 106, 107, 3, 4, 2, 0, 107, 113, 1, 0, 0, 0, 108, 109, 10, 1, 0, 0, 109, 110, 5, 26, 0, 0, 110, 112, 3, 6, 3, 0, 111, 108, 1, 0, 0, 0, 112, 115, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 3, 1, 0, 0, 0, 115, 113, 1, 0, 0, 0, 116, 122, 3, 90, 45, 0, 117, 122, 3, 28, 14, 0, 118, 122, 3, 22, 11, 0, 119, 122, 3, 94, 47, 0, 120, 122, 3, 96, 48, 0, 121, 116, 1, 0, 0, 0, 121, 117, 1, 0, 0, 0, 121, 118, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 120, 1, 0, 0, 0, 122, 5, 1, 0, 0, 0, 123, 137, 3, 36, 18, 0, 124, 137, 3, 40, 20, 0, 125, 137, 3, 54, 27, 0, 126, 137, 3, 60, 30, 0, 127, 137, 3, 56, 28, 0, 128, 137, 3, 38, 19, 0, 129, 137, 3, 8, 4, 0, 130, 137, 3, 62, 31, 0, 131, 137, 3, 64, 32, 0, 132, 137, 3, 68, 34, 0, 133, 137, 3, 70, 35, 0, 134, 137, 3, 
98, 49, 0, 135, 137, 3, 72, 36, 0, 136, 123, 1, 0, 0, 0, 136, 124, 1, 0, 0, 0, 136, 125, 1, 0, 0, 0, 136, 126, 1, 0, 0, 0, 136, 127, 1, 0, 0, 0, 136, 128, 1, 0, 0, 0, 136, 129, 1, 0, 0, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 135, 1, 0, 0, 0, 137, 7, 1, 0, 0, 0, 138, 139, 5, 18, 0, 0, 139, 140, 3, 10, 5, 0, 140, 9, 1, 0, 0, 0, 141, 142, 6, 5, -1, 0, 142, 143, 5, 44, 0, 0, 143, 170, 3, 10, 5, 7, 144, 170, 3, 14, 7, 0, 145, 170, 3, 12, 6, 0, 146, 148, 3, 14, 7, 0, 147, 149, 5, 44, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 5, 41, 0, 0, 151, 152, 5, 40, 0, 0, 152, 157, 3, 14, 7, 0, 153, 154, 5, 34, 0, 0, 154, 156, 3, 14, 7, 0, 155, 153, 1, 0, 0, 0, 156, 159, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 1, 0, 0, 0, 159, 157, 1, 0, 0, 0, 160, 161, 5, 50, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 3, 14, 7, 0, 163, 165, 5, 42, 0, 0, 164, 166, 5, 44, 0, 0, 165, 164, 1, 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 168, 5, 45, 0, 0, 168, 170, 1, 0, 0, 0, 169, 141, 1, 0, 0, 0, 169, 144, 1, 0, 0, 0, 169, 145, 1, 0, 0, 0, 169, 146, 1, 0, 0, 0, 169, 162, 1, 0, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 10, 4, 0, 0, 172, 173, 5, 31, 0, 0, 173, 178, 3, 10, 5, 5, 174, 175, 10, 3, 0, 0, 175, 176, 5, 47, 0, 0, 176, 178, 3, 10, 5, 4, 177, 171, 1, 0, 0, 0, 177, 174, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 11, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 184, 3, 14, 7, 0, 183, 185, 5, 44, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 43, 0, 0, 187, 188, 3, 86, 43, 0, 188, 197, 1, 0, 0, 0, 189, 191, 3, 14, 7, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 49, 0, 0, 194, 195, 3, 86, 43, 0, 195, 197, 1, 0, 0, 0, 196, 182, 1, 0, 0, 0, 196, 189, 1, 0, 0, 0, 197, 13, 1, 0, 0, 0, 198, 204, 3, 16, 8, 0, 199, 200, 3, 16, 8, 0, 200, 201, 3, 88, 44, 0, 201, 202, 3, 16, 8, 0, 202, 204, 1, 0, 0, 0, 203, 198, 1, 0, 0, 0, 203, 199, 1, 0, 0, 0, 204, 15, 1, 0, 0, 0, 205, 206, 6, 8, -1, 0, 206, 210, 3, 18, 9, 0, 207, 208, 7, 0, 0, 0, 208, 210, 3, 16, 8, 3, 209, 205, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 219, 1, 0, 0, 0, 211, 212, 10, 2, 0, 0, 212, 213, 7, 1, 0, 0, 213, 218, 3, 16, 8, 3, 214, 215, 10, 1, 0, 0, 215, 216, 7, 0, 0, 0, 216, 218, 3, 16, 8, 2, 217, 211, 1, 0, 0, 0, 217, 214, 1, 0, 0, 0, 218, 221, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 17, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 230, 3, 52, 26, 0, 223, 230, 3, 44, 22, 0, 224, 230, 3, 20, 10, 0, 225, 226, 5, 40, 0, 0, 226, 227, 3, 10, 5, 0, 227, 228, 5, 50, 0, 0, 228, 230, 1, 0, 0, 0, 229, 222, 1, 0, 0, 0, 229, 223, 1, 0, 0, 0, 229, 224, 1, 0, 0, 0, 229, 225, 1, 0, 0, 0, 230, 19, 1, 0, 0, 0, 231, 232, 3, 48, 24, 0, 232, 242, 5, 40, 0, 0, 233, 243, 5, 61, 0, 0, 234, 239, 3, 10, 5, 0, 235, 236, 5, 34, 0, 0, 236, 238, 3, 10, 5, 0, 237, 235, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 243, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 233, 1, 0, 0, 0, 242, 234, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 245, 5, 50, 0, 0, 245, 21, 1, 0, 0, 0, 246, 247, 5, 14, 0, 0, 247, 248, 3, 24, 12, 0, 248, 23, 1, 0, 0, 0, 249, 254, 3, 26, 13, 0, 250, 251, 5, 34, 0, 0, 251, 253, 3, 26, 13, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 25, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 263, 3, 10, 5, 0, 258, 
259, 3, 44, 22, 0, 259, 260, 5, 33, 0, 0, 260, 261, 3, 10, 5, 0, 261, 263, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 27, 1, 0, 0, 0, 264, 265, 5, 6, 0, 0, 265, 270, 3, 42, 21, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 42, 21, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 3, 30, 15, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 29, 1, 0, 0, 0, 276, 279, 3, 32, 16, 0, 277, 279, 3, 34, 17, 0, 278, 276, 1, 0, 0, 0, 278, 277, 1, 0, 0, 0, 279, 31, 1, 0, 0, 0, 280, 281, 5, 71, 0, 0, 281, 286, 3, 42, 21, 0, 282, 283, 5, 34, 0, 0, 283, 285, 3, 42, 21, 0, 284, 282, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 33, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 64, 0, 0, 290, 291, 3, 32, 16, 0, 291, 292, 5, 65, 0, 0, 292, 35, 1, 0, 0, 0, 293, 294, 5, 4, 0, 0, 294, 295, 3, 24, 12, 0, 295, 37, 1, 0, 0, 0, 296, 298, 5, 17, 0, 0, 297, 299, 3, 24, 12, 0, 298, 297, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 301, 5, 30, 0, 0, 301, 303, 3, 24, 12, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 39, 1, 0, 0, 0, 304, 305, 5, 8, 0, 0, 305, 308, 3, 24, 12, 0, 306, 307, 5, 30, 0, 0, 307, 309, 3, 24, 12, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 41, 1, 0, 0, 0, 310, 311, 7, 2, 0, 0, 311, 43, 1, 0, 0, 0, 312, 317, 3, 48, 24, 0, 313, 314, 5, 36, 0, 0, 314, 316, 3, 48, 24, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 45, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 325, 3, 50, 25, 0, 321, 322, 5, 36, 0, 0, 322, 324, 3, 50, 25, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 47, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 329, 7, 3, 0, 0, 329, 49, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 51, 1, 0, 0, 0, 332, 375, 5, 45, 0, 0, 333, 334, 3, 84, 42, 0, 334, 335, 5, 66, 0, 0, 335, 375, 1, 0, 0, 0, 336, 375, 3, 82, 41, 0, 337, 375, 3, 84, 42, 0, 338, 375, 3, 78, 39, 0, 339, 375, 5, 48, 0, 0, 340, 375, 3, 86, 43, 0, 341, 342, 5, 64, 0, 0, 342, 347, 3, 80, 40, 0, 343, 344, 5, 34, 0, 0, 344, 346, 3, 80, 40, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 350, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 65, 0, 0, 351, 375, 1, 0, 0, 0, 352, 353, 5, 64, 0, 0, 353, 358, 3, 78, 39, 0, 354, 355, 5, 34, 0, 0, 355, 357, 3, 78, 39, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 361, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 5, 65, 0, 0, 362, 375, 1, 0, 0, 0, 363, 364, 5, 64, 0, 0, 364, 369, 3, 86, 43, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 86, 43, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 373, 5, 65, 0, 0, 373, 375, 1, 0, 0, 0, 374, 332, 1, 0, 0, 0, 374, 333, 1, 0, 0, 0, 374, 336, 1, 0, 0, 0, 374, 337, 1, 0, 0, 0, 374, 338, 1, 0, 0, 0, 374, 339, 1, 0, 0, 0, 374, 340, 1, 0, 0, 0, 374, 341, 1, 0, 0, 0, 374, 352, 1, 0, 0, 0, 374, 363, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 377, 5, 10, 0, 0, 377, 378, 5, 28, 0, 0, 378, 55, 1, 0, 0, 0, 379, 380, 5, 16, 0, 0, 380, 385, 3, 58, 29, 0, 381, 382, 5, 34, 0, 0, 382, 384, 3, 58, 29, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 57, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 390, 3, 10, 5, 0, 389, 391, 7, 4, 0, 0, 390, 389, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 
393, 5, 46, 0, 0, 393, 395, 7, 5, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 397, 5, 9, 0, 0, 397, 402, 3, 46, 23, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 46, 23, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 2, 0, 0, 406, 411, 3, 46, 23, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 46, 23, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 63, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 5, 13, 0, 0, 415, 420, 3, 66, 33, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 66, 33, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 65, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 3, 46, 23, 0, 424, 425, 5, 80, 0, 0, 425, 426, 3, 46, 23, 0, 426, 67, 1, 0, 0, 0, 427, 428, 5, 1, 0, 0, 428, 429, 3, 18, 9, 0, 429, 431, 3, 86, 43, 0, 430, 432, 3, 74, 37, 0, 431, 430, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 69, 1, 0, 0, 0, 433, 434, 5, 7, 0, 0, 434, 435, 3, 18, 9, 0, 435, 436, 3, 86, 43, 0, 436, 71, 1, 0, 0, 0, 437, 438, 5, 12, 0, 0, 438, 439, 3, 44, 22, 0, 439, 73, 1, 0, 0, 0, 440, 445, 3, 76, 38, 0, 441, 442, 5, 34, 0, 0, 442, 444, 3, 76, 38, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 75, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 3, 48, 24, 0, 449, 450, 5, 33, 0, 0, 450, 451, 3, 52, 26, 0, 451, 77, 1, 0, 0, 0, 452, 453, 7, 6, 0, 0, 453, 79, 1, 0, 0, 0, 454, 457, 3, 82, 41, 0, 455, 457, 3, 84, 42, 0, 456, 454, 1, 0, 0, 0, 456, 455, 1, 0, 0, 0, 457, 81, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 29, 0, 0, 462, 83, 1, 0, 0, 0, 463, 465, 7, 0, 0, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 5, 28, 0, 0, 467, 85, 1, 0, 0, 0, 468, 469, 5, 27, 0, 0, 469, 87, 1, 0, 0, 0, 470, 471, 7, 7, 0, 0, 471, 89, 1, 0, 0, 0, 472, 473, 5, 5, 0, 0, 473, 474, 3, 92, 46, 0, 474, 91, 1, 0, 0, 0, 475, 476, 5, 64, 0, 0, 476, 477, 3, 2, 1, 0, 477, 478, 5, 65, 0, 0, 478, 93, 1, 0, 0, 0, 479, 480, 5, 15, 0, 0, 480, 481, 5, 96, 0, 0, 481, 95, 1, 0, 0, 0, 482, 483, 5, 11, 0, 0, 483, 484, 5, 100, 0, 0, 484, 97, 1, 0, 0, 0, 485, 486, 5, 3, 0, 0, 486, 489, 5, 86, 0, 0, 487, 488, 5, 84, 0, 0, 488, 490, 3, 46, 23, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 500, 1, 0, 0, 0, 491, 492, 5, 85, 0, 0, 492, 497, 3, 100, 50, 0, 493, 494, 5, 34, 0, 0, 494, 496, 3, 100, 50, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 491, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 3, 46, 23, 0, 503, 504, 5, 33, 0, 0, 504, 506, 1, 0, 0, 0, 505, 502, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 3, 46, 23, 0, 508, 101, 1, 0, 0, 0, 49, 113, 121, 136, 148, 157, 165, 169, 177, 179, 184, 191, 196, 203, 209, 217, 219, 229, 239, 242, 254, 262, 270, 274, 278, 286, 298, 302, 308, 317, 325, 347, 358, 369, 374, 385, 390, 394, 402, 411, 420, 431, 445, 456, 459, 464, 489, 497, 500, 505] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e0e3a77c0ad6a..88eaf491ca9d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,25 +18,26 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21, - EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24, - PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38, - LP=39, IN=40, IS=41, LIKE=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, - RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, - GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, - CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, - EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, ID_PATTERN=75, - PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, - AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, - ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, - ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, - MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, - SETTING_WS=104; + KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, + STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, + WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, + PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, + ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, + LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, + RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, + GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, + FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, + PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, + AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, + ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, + ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, + ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, + MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, + META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -53,7 +54,7 @@ public class 
EsqlBaseParser extends Parser { RULE_booleanValue = 39, RULE_numericValue = 40, RULE_decimalValue = 41, RULE_integerValue = 42, RULE_string = 43, RULE_comparisonOperator = 44, RULE_explainCommand = 45, RULE_subqueryExpression = 46, RULE_showCommand = 47, - RULE_enrichCommand = 48, RULE_enrichWithClause = 49; + RULE_metaCommand = 48, RULE_enrichCommand = 49, RULE_enrichWithClause = 50; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -66,7 +67,7 @@ private static String[] makeRuleNames() { "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause" + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -74,25 +75,25 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'", - "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, - null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", - "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", - "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", - "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, - null, null, "'metadata'", null, null, null, null, null, null, null, null, - "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, - null, null, null, null, null, "'info'", "'functions'", null, null, null, - "':'" + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", + "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", - "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", + "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", @@ -106,8 +107,9 @@ private static 
String[] makeSymbolicNames() { "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", + "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } @@ -195,9 +197,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(100); + setState(102); query(0); - setState(101); + setState(103); match(EOF); } } @@ -293,11 +295,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(104); + setState(106); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(111); + setState(113); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -308,16 +310,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(106); + setState(108); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(107); + setState(109); match(PIPE); - setState(108); + setState(110); processingCommand(); } } } - setState(113); + setState(115); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -348,6 +350,9 @@ public RowCommandContext rowCommand() { public ShowCommandContext showCommand() { return getRuleContext(ShowCommandContext.class,0); } + public MetaCommandContext metaCommand() { + return getRuleContext(MetaCommandContext.class,0); + } @SuppressWarnings("this-escape") public SourceCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -372,37 +377,44 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(118); + setState(121); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(114); + setState(116); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(115); + setState(117); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(116); + setState(118); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(117); + setState(119); showCommand(); } break; + case META: + enterOuterAlt(_localctx, 5); + { + setState(120); + metaCommand(); + } + break; default: throw new NoViableAltException(this); } @@ -483,97 +495,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(133); + setState(136); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: 
enterOuterAlt(_localctx, 1); { - setState(120); + setState(123); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(121); + setState(124); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(122); + setState(125); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(123); + setState(126); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(124); + setState(127); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(125); + setState(128); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(126); + setState(129); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(127); + setState(130); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(128); + setState(131); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(129); + setState(132); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(130); + setState(133); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(131); + setState(134); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(132); + setState(135); mvExpandCommand(); } break; @@ -624,9 +636,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(135); + setState(138); match(WHERE); - setState(136); + setState(139); booleanExpression(0); } } @@ -821,7 +833,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(166); + setState(169); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -830,9 +842,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(139); + setState(142); match(NOT); - setState(140); + setState(143); booleanExpression(7); } break; @@ -841,7 +853,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(141); + setState(144); valueExpression(); } break; @@ -850,7 +862,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(142); + setState(145); regexBooleanExpression(); } break; @@ -859,41 +871,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(143); + setState(146); valueExpression(); - setState(145); + setState(148); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(144); + setState(147); match(NOT); } } - setState(147); + setState(150); match(IN); - setState(148); + setState(151); match(LP); - setState(149); + setState(152); valueExpression(); - setState(154); + setState(157); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(150); + setState(153); match(COMMA); - setState(151); + setState(154); valueExpression(); } } - setState(156); + setState(159); _errHandler.sync(this); _la = _input.LA(1); } - setState(157); + setState(160); match(RP); } break; @@ -902,27 +914,27 @@ private BooleanExpressionContext 
booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(159); + setState(162); valueExpression(); - setState(160); + setState(163); match(IS); - setState(162); + setState(165); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(161); + setState(164); match(NOT); } } - setState(164); + setState(167); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(176); + setState(179); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -930,7 +942,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(174); + setState(177); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -938,11 +950,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(168); + setState(171); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(169); + setState(172); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(170); + setState(173); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -951,18 +963,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(171); + setState(174); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(172); + setState(175); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(173); + setState(176); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(178); + setState(181); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1017,48 +1029,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(193); + setState(196); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(179); + setState(182); valueExpression(); - setState(181); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(180); + setState(183); match(NOT); } } - setState(183); + setState(186); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(184); + setState(187); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(186); + setState(189); valueExpression(); - setState(188); + setState(191); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(187); + setState(190); match(NOT); } } - setState(190); + setState(193); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(191); + setState(194); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1144,14 
+1156,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(200); + setState(203); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(195); + setState(198); operatorExpression(0); } break; @@ -1159,11 +1171,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(196); + setState(199); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(197); + setState(200); comparisonOperator(); - setState(198); + setState(201); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1288,7 +1300,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1297,7 +1309,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(203); + setState(206); primaryExpression(); } break; @@ -1306,7 +1318,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(204); + setState(207); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1317,13 +1329,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(205); + setState(208); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(219); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1331,7 +1343,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(217); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1339,12 +1351,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(208); + setState(211); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(209); + setState(212); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1352,7 +1364,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(210); + setState(213); 
((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1361,9 +1373,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(211); + setState(214); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(212); + setState(215); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1374,14 +1386,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(213); + setState(216); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(218); + setState(221); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1503,14 +1515,14 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); try { - setState(226); + setState(229); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(219); + setState(222); constant(); } break; @@ -1518,7 +1530,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(220); + setState(223); qualifiedName(); } break; @@ -1526,7 +1538,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(221); + setState(224); functionExpression(); } break; @@ -1534,11 +1546,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(222); + setState(225); match(LP); - setState(223); + setState(226); booleanExpression(0); - setState(224); + setState(227); match(RP); } break; @@ -1600,16 +1612,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(228); + setState(231); identifier(); - setState(229); + setState(232); match(LP); - setState(239); + setState(242); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(230); + setState(233); match(ASTERISK); } break; @@ -1629,21 +1641,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(231); + setState(234); booleanExpression(0); - setState(236); + setState(239); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(232); + setState(235); match(COMMA); - setState(233); + setState(236); booleanExpression(0); } } - setState(238); + setState(241); _errHandler.sync(this); _la = _input.LA(1); } @@ -1655,7 +1667,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(241); + setState(244); match(RP); } } @@ -1702,9 +1714,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { 
enterOuterAlt(_localctx, 1); { - setState(243); + setState(246); match(ROW); - setState(244); + setState(247); fields(); } } @@ -1758,23 +1770,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(246); + setState(249); field(); - setState(251); + setState(254); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(247); + setState(250); match(COMMA); - setState(248); + setState(251); field(); } } } - setState(253); + setState(256); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1824,24 +1836,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 26, RULE_field); try { - setState(259); + setState(262); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(254); + setState(257); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(255); + setState(258); qualifiedName(); - setState(256); + setState(259); match(ASSIGN); - setState(257); + setState(260); booleanExpression(0); } break; @@ -1901,34 +1913,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(261); + setState(264); match(FROM); - setState(262); + setState(265); fromIdentifier(); - setState(267); + setState(270); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(263); + setState(266); match(COMMA); - setState(264); + setState(267); fromIdentifier(); } } } - setState(269); + setState(272); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(271); + setState(274); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(270); + setState(273); metadata(); } break; @@ -1978,20 +1990,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 30, RULE_metadata); try { - setState(275); + setState(278); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(273); + setState(276); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(274); + setState(277); deprecated_metadata(); } break; @@ -2050,25 +2062,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(277); + setState(280); match(METADATA); - setState(278); + setState(281); fromIdentifier(); - setState(283); + setState(286); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,24,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(279); + setState(282); match(COMMA); - setState(280); + setState(283); fromIdentifier(); } } } - setState(285); + setState(288); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,24,_ctx); } @@ -2117,11 +2129,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - 
setState(286); + setState(289); match(OPENING_BRACKET); - setState(287); + setState(290); metadataOption(); - setState(288); + setState(291); match(CLOSING_BRACKET); } } @@ -2168,9 +2180,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(293); match(EVAL); - setState(291); + setState(294); fields(); } } @@ -2223,26 +2235,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(293); + setState(296); match(STATS); - setState(295); + setState(298); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(294); + setState(297); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(299); + setState(302); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(297); + setState(300); match(BY); - setState(298); + setState(301); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2298,18 +2310,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(301); + setState(304); match(INLINESTATS); - setState(302); - ((InlinestatsCommandContext)_localctx).stats = fields(); setState(305); + ((InlinestatsCommandContext)_localctx).stats = fields(); + setState(308); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(303); + setState(306); match(BY); - setState(304); + setState(307); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2358,7 +2370,7 @@ public final FromIdentifierContext fromIdentifier() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); _la = _input.LA(1); if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2420,23 +2432,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(309); + setState(312); identifier(); - setState(314); + setState(317); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(310); + setState(313); match(DOT); - setState(311); + setState(314); identifier(); } } } - setState(316); + setState(319); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2492,23 +2504,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(317); + setState(320); identifierPattern(); - setState(322); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(318); + setState(321); match(DOT); - setState(319); + setState(322); identifierPattern(); } } } - setState(324); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } @@ -2556,7 +2568,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(325); + setState(328); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2608,7 +2620,7 @@ 
public final IdentifierPatternContext identifierPattern() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(327); + setState(330); match(ID_PATTERN); } } @@ -2877,14 +2889,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 52, RULE_constant); int _la; try { - setState(371); + setState(374); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(329); + setState(332); match(NULL); } break; @@ -2892,9 +2904,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(330); + setState(333); integerValue(); - setState(331); + setState(334); match(UNQUOTED_IDENTIFIER); } break; @@ -2902,7 +2914,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(333); + setState(336); decimalValue(); } break; @@ -2910,7 +2922,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(334); + setState(337); integerValue(); } break; @@ -2918,7 +2930,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(335); + setState(338); booleanValue(); } break; @@ -2926,7 +2938,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(336); + setState(339); match(PARAM); } break; @@ -2934,7 +2946,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(337); + setState(340); string(); } break; @@ -2942,27 +2954,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(338); + setState(341); match(OPENING_BRACKET); - setState(339); + setState(342); numericValue(); - setState(344); + setState(347); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(340); + setState(343); match(COMMA); - setState(341); + setState(344); numericValue(); } } - setState(346); + setState(349); _errHandler.sync(this); _la = _input.LA(1); } - setState(347); + setState(350); match(CLOSING_BRACKET); } break; @@ -2970,27 +2982,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(349); + setState(352); match(OPENING_BRACKET); - setState(350); + setState(353); booleanValue(); - setState(355); + setState(358); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(351); + setState(354); match(COMMA); - setState(352); + setState(355); booleanValue(); } } - setState(357); + setState(360); _errHandler.sync(this); _la = _input.LA(1); } - setState(358); + setState(361); match(CLOSING_BRACKET); } break; @@ -2998,27 +3010,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(360); + setState(363); match(OPENING_BRACKET); 
- setState(361); + setState(364); string(); - setState(366); + setState(369); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(362); + setState(365); match(COMMA); - setState(363); + setState(366); string(); } } - setState(368); + setState(371); _errHandler.sync(this); _la = _input.LA(1); } - setState(369); + setState(372); match(CLOSING_BRACKET); } break; @@ -3065,9 +3077,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(373); + setState(376); match(LIMIT); - setState(374); + setState(377); match(INTEGER_LITERAL); } } @@ -3122,25 +3134,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(379); match(SORT); - setState(377); + setState(380); orderExpression(); - setState(382); + setState(385); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(381); match(COMMA); - setState(379); + setState(382); orderExpression(); } } } - setState(384); + setState(387); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3196,14 +3208,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(385); + setState(388); booleanExpression(0); - setState(387); + setState(390); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(386); + setState(389); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3217,14 +3229,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(391); + setState(394); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(389); + setState(392); match(NULLS); - setState(390); + setState(393); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3291,25 +3303,25 @@ public final KeepCommandContext keepCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(393); + setState(396); match(KEEP); - setState(394); + setState(397); qualifiedNamePattern(); - setState(399); + setState(402); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(395); + setState(398); match(COMMA); - setState(396); + setState(399); qualifiedNamePattern(); } } } - setState(401); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3366,25 +3378,25 @@ public final DropCommandContext dropCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(402); + setState(405); match(DROP); - setState(403); + setState(406); qualifiedNamePattern(); - setState(408); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(404); + setState(407); match(COMMA); - setState(405); + setState(408); qualifiedNamePattern(); } } } - setState(410); + setState(413); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3441,25 +3453,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(411); + setState(414); match(RENAME); - setState(412); + setState(415); renameClause(); - setState(417); + setState(420); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(413); + setState(416); match(COMMA); - setState(414); + setState(417); renameClause(); } } } - setState(419); + setState(422); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); } @@ -3513,11 +3525,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(420); + setState(423); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(421); + setState(424); match(AS); - setState(422); + setState(425); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3570,18 +3582,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(424); + setState(427); match(DISSECT); - setState(425); + setState(428); primaryExpression(); - setState(426); + setState(429); string(); - setState(428); + setState(431); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - setState(427); + setState(430); commandOptions(); } break; @@ -3634,11 +3646,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(430); + setState(433); match(GROK); - setState(431); + setState(434); primaryExpression(); - setState(432); + setState(435); string(); } } @@ -3685,9 +3697,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(434); + setState(437); match(MV_EXPAND); - setState(435); + setState(438); qualifiedName(); } } @@ -3741,23 +3753,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(437); + setState(440); commandOption(); - setState(442); + setState(445); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(438); + setState(441); match(COMMA); - setState(439); + setState(442); commandOption(); } } } - setState(444); + setState(447); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } @@ -3809,11 +3821,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(445); + setState(448); identifier(); - setState(446); + setState(449); match(ASSIGN); - setState(447); + setState(450); constant(); } } @@ -3859,7 +3871,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(449); + setState(452); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3914,20 +3926,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 80, 
RULE_numericValue); try { - setState(453); + setState(456); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(451); + setState(454); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(452); + setState(455); integerValue(); } break; @@ -3976,12 +3988,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(459); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(455); + setState(458); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3994,7 +4006,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(458); + setState(461); match(DECIMAL_LITERAL); } } @@ -4041,12 +4053,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(464); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(460); + setState(463); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4059,7 +4071,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(463); + setState(466); match(INTEGER_LITERAL); } } @@ -4103,7 +4115,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(468); match(STRING); } } @@ -4153,9 +4165,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(467); + setState(470); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4208,9 +4220,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(472); match(EXPLAIN); - setState(470); + setState(473); subqueryExpression(); } } @@ -4258,11 +4270,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(472); + setState(475); match(OPENING_BRACKET); - setState(473); + setState(476); query(0); - setState(474); + setState(477); match(CLOSING_BRACKET); } } @@ -4311,54 +4323,77 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + + public final ShowCommandContext showCommand() throws RecognitionException { + ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); + enterRule(_localctx, 94, RULE_showCommand); + try { + _localctx = new ShowInfoContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(479); + match(SHOW); + setState(480); + match(INFO); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") - public static class ShowFunctionsContext extends ShowCommandContext { - public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); } + public static class MetaCommandContext extends ParserRuleContext { + @SuppressWarnings("this-escape") + public MetaCommandContext(ParserRuleContext 
parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_metaCommand; } + + @SuppressWarnings("this-escape") + public MetaCommandContext() { } + public void copyFrom(MetaCommandContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class MetaFunctionsContext extends MetaCommandContext { + public TerminalNode META() { return getToken(EsqlBaseParser.META, 0); } public TerminalNode FUNCTIONS() { return getToken(EsqlBaseParser.FUNCTIONS, 0); } @SuppressWarnings("this-escape") - public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); } + public MetaFunctionsContext(MetaCommandContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowFunctions(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetaFunctions(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitShowFunctions(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetaFunctions(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitShowFunctions(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetaFunctions(this); else return visitor.visitChildren(this); } } - public final ShowCommandContext showCommand() throws RecognitionException { - ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_showCommand); + public final MetaCommandContext metaCommand() throws RecognitionException { + MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); + enterRule(_localctx, 96, RULE_metaCommand); try { - setState(480); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { - case 1: - _localctx = new ShowInfoContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(476); - match(SHOW); - setState(477); - match(INFO); - } - break; - case 2: - _localctx = new ShowFunctionsContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(478); - match(SHOW); - setState(479); - match(FUNCTIONS); - } - break; + _localctx = new MetaFunctionsContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(482); + match(META); + setState(483); + match(FUNCTIONS); } } catch (RecognitionException re) { @@ -4415,53 +4450,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_enrichCommand); + enterRule(_localctx, 98, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(482); + setState(485); match(ENRICH); - setState(483); - ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); setState(486); + ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); + setState(489); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(484); + setState(487); match(ON); - setState(485); + setState(488); 
((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(497); + setState(500); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(488); + setState(491); match(WITH); - setState(489); + setState(492); enrichWithClause(); - setState(494); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(490); + setState(493); match(COMMA); - setState(491); + setState(494); enrichWithClause(); } } } - setState(496); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } } break; @@ -4512,23 +4547,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_enrichWithClause); + enterRule(_localctx, 100, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(502); + setState(505); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(499); + setState(502); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(500); + setState(503); match(ASSIGN); } break; } - setState(504); + setState(507); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4581,7 +4616,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001h\u01fb\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001l\u01fe\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4594,317 +4629,318 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0005\u0001n\b\u0001\n\u0001\f\u0001q\t"+ - "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002w\b"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ + "2\u00072\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001p\b\u0001\n\u0001"+ + "\f\u0001s\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002z\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - 
"\u0003\u0001\u0003\u0003\u0003\u0086\b\u0003\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0003\u0005\u0092\b\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u0099\b\u0005\n\u0005\f\u0005"+ - "\u009c\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0003\u0005\u00a3\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a7\b"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005\u00af\b\u0005\n\u0005\f\u0005\u00b2\t\u0005\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00b6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00bd\b\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0003\u0007\u00c9\b\u0007\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00cf\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0005\b\u00d7\b\b\n\b\f\b\u00da\t\b\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0003\t\u00e3\b\t\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0005\n\u00eb\b\n\n\n\f\n\u00ee\t\n\u0003\n\u00f0\b"+ - "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f"+ - "\u0001\f\u0005\f\u00fa\b\f\n\f\f\f\u00fd\t\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0003\r\u0104\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0005\u000e\u010a\b\u000e\n\u000e\f\u000e\u010d\t\u000e\u0001\u000e"+ - "\u0003\u000e\u0110\b\u000e\u0001\u000f\u0001\u000f\u0003\u000f\u0114\b"+ - "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u011a"+ - "\b\u0010\n\u0010\f\u0010\u011d\t\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0003\u0013\u0128\b\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u012c\b"+ - "\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0132"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0089\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u0095\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u009c"+ + "\b\u0005\n\u0005\f\u0005\u009f\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00a6\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00aa\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00b2\b\u0005\n\u0005\f\u0005\u00b5"+ + "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b9\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c0\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c5\b\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00cc\b\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00d2\b\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00da\b\b\n\b\f\b\u00dd\t\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00e6\b\t\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00ee\b\n\n\n\f\n\u00f1"+ + "\t\n\u0003\n\u00f3\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0005\f\u00fd\b\f\n\f\f\f\u0100\t\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0003\r\u0107\b\r\u0001\u000e\u0001\u000e\u0001"+ + 
"\u000e\u0001\u000e\u0005\u000e\u010d\b\u000e\n\u000e\f\u000e\u0110\t\u000e"+ + "\u0001\u000e\u0003\u000e\u0113\b\u000e\u0001\u000f\u0001\u000f\u0003\u000f"+ + "\u0117\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010"+ + "\u011d\b\u0010\n\u0010\f\u0010\u0120\t\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0003\u0013\u012b\b\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u012f"+ + "\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0135"+ "\b\u0014\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+ - "\u0016\u0139\b\u0016\n\u0016\f\u0016\u013c\t\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0005\u0017\u0141\b\u0017\n\u0017\f\u0017\u0144\t\u0017\u0001"+ + "\u0016\u013c\b\u0016\n\u0016\f\u0016\u013f\t\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0005\u0017\u0144\b\u0017\n\u0017\f\u0017\u0147\t\u0017\u0001"+ "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0157"+ - "\b\u001a\n\u001a\f\u001a\u015a\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0162\b\u001a\n\u001a"+ - "\f\u001a\u0165\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0005\u001a\u016d\b\u001a\n\u001a\f\u001a\u0170"+ - "\t\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0174\b\u001a\u0001\u001b"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u015a"+ + "\b\u001a\n\u001a\f\u001a\u015d\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0165\b\u001a\n\u001a"+ + "\f\u001a\u0168\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0005\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173"+ + "\t\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001b"+ "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u017d\b\u001c\n\u001c\f\u001c\u0180\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0003\u001d\u0184\b\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u0188"+ - "\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u018e"+ - "\b\u001e\n\u001e\f\u001e\u0191\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0005\u001f\u0197\b\u001f\n\u001f\f\u001f\u019a\t\u001f\u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01a0\b \n \f \u01a3\t \u0001!\u0001!\u0001"+ - "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0003\"\u01ad\b\"\u0001#\u0001"+ - "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0005%\u01b9"+ - "\b%\n%\f%\u01bc\t%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001("+ - "\u0001(\u0003(\u01c6\b(\u0001)\u0003)\u01c9\b)\u0001)\u0001)\u0001*\u0003"+ - "*\u01ce\b*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001"+ - "-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0003/\u01e1"+ - "\b/\u00010\u00010\u00010\u00010\u00030\u01e7\b0\u00010\u00010\u00010\u0001"+ - "0\u00050\u01ed\b0\n0\f0\u01f0\t0\u00030\u01f2\b0\u00011\u00011\u00011"+ - "\u00031\u01f7\b1\u00011\u00011\u00011\u0000\u0003\u0002\n\u00102\u0000"+ + "\u0005\u001c\u0180\b\u001c\n\u001c\f\u001c\u0183\t\u001c\u0001\u001d\u0001"+ + "\u001d\u0003\u001d\u0187\b\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u018b"+ + "\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0191"+ + 
"\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0005\u001f\u019a\b\u001f\n\u001f\f\u001f\u019d\t\u001f\u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01a3\b \n \f \u01a6\t \u0001!\u0001!\u0001"+ + "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0003\"\u01b0\b\"\u0001#\u0001"+ + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0005%\u01bc"+ + "\b%\n%\f%\u01bf\t%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001("+ + "\u0001(\u0003(\u01c9\b(\u0001)\u0003)\u01cc\b)\u0001)\u0001)\u0001*\u0003"+ + "*\u01d1\b*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00011\u00011\u00011\u00011\u00031\u01ea\b1\u00011\u00011\u00011\u0001"+ + "1\u00051\u01f0\b1\n1\f1\u01f3\t1\u00031\u01f5\b1\u00012\u00012\u00012"+ + "\u00032\u01fa\b2\u00012\u00012\u00012\u0000\u0003\u0002\n\u00103\u0000"+ "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ - "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`b\u0000\b\u0001\u0000:;\u0001"+ - "\u0000<>\u0002\u0000BBGG\u0001\u0000AB\u0002\u0000\u001f\u001f\"\"\u0001"+ - "\u0000%&\u0002\u0000$$22\u0002\u00003359\u0215\u0000d\u0001\u0000\u0000"+ - "\u0000\u0002g\u0001\u0000\u0000\u0000\u0004v\u0001\u0000\u0000\u0000\u0006"+ - "\u0085\u0001\u0000\u0000\u0000\b\u0087\u0001\u0000\u0000\u0000\n\u00a6"+ - "\u0001\u0000\u0000\u0000\f\u00c1\u0001\u0000\u0000\u0000\u000e\u00c8\u0001"+ - "\u0000\u0000\u0000\u0010\u00ce\u0001\u0000\u0000\u0000\u0012\u00e2\u0001"+ - "\u0000\u0000\u0000\u0014\u00e4\u0001\u0000\u0000\u0000\u0016\u00f3\u0001"+ - "\u0000\u0000\u0000\u0018\u00f6\u0001\u0000\u0000\u0000\u001a\u0103\u0001"+ - "\u0000\u0000\u0000\u001c\u0105\u0001\u0000\u0000\u0000\u001e\u0113\u0001"+ - "\u0000\u0000\u0000 \u0115\u0001\u0000\u0000\u0000\"\u011e\u0001\u0000"+ - "\u0000\u0000$\u0122\u0001\u0000\u0000\u0000&\u0125\u0001\u0000\u0000\u0000"+ - "(\u012d\u0001\u0000\u0000\u0000*\u0133\u0001\u0000\u0000\u0000,\u0135"+ - "\u0001\u0000\u0000\u0000.\u013d\u0001\u0000\u0000\u00000\u0145\u0001\u0000"+ - "\u0000\u00002\u0147\u0001\u0000\u0000\u00004\u0173\u0001\u0000\u0000\u0000"+ - "6\u0175\u0001\u0000\u0000\u00008\u0178\u0001\u0000\u0000\u0000:\u0181"+ - "\u0001\u0000\u0000\u0000<\u0189\u0001\u0000\u0000\u0000>\u0192\u0001\u0000"+ - "\u0000\u0000@\u019b\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000\u0000"+ - "D\u01a8\u0001\u0000\u0000\u0000F\u01ae\u0001\u0000\u0000\u0000H\u01b2"+ - "\u0001\u0000\u0000\u0000J\u01b5\u0001\u0000\u0000\u0000L\u01bd\u0001\u0000"+ - "\u0000\u0000N\u01c1\u0001\u0000\u0000\u0000P\u01c5\u0001\u0000\u0000\u0000"+ - "R\u01c8\u0001\u0000\u0000\u0000T\u01cd\u0001\u0000\u0000\u0000V\u01d1"+ - "\u0001\u0000\u0000\u0000X\u01d3\u0001\u0000\u0000\u0000Z\u01d5\u0001\u0000"+ - "\u0000\u0000\\\u01d8\u0001\u0000\u0000\u0000^\u01e0\u0001\u0000\u0000"+ - "\u0000`\u01e2\u0001\u0000\u0000\u0000b\u01f6\u0001\u0000\u0000\u0000d"+ - "e\u0003\u0002\u0001\u0000ef\u0005\u0000\u0000\u0001f\u0001\u0001\u0000"+ - "\u0000\u0000gh\u0006\u0001\uffff\uffff\u0000hi\u0003\u0004\u0002\u0000"+ - "io\u0001\u0000\u0000\u0000jk\n\u0001\u0000\u0000kl\u0005\u0019\u0000\u0000"+ - "ln\u0003\u0006\u0003\u0000mj\u0001\u0000\u0000\u0000nq\u0001\u0000\u0000"+ - "\u0000om\u0001\u0000\u0000\u0000op\u0001\u0000\u0000\u0000p\u0003\u0001"+ - "\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000rw\u0003Z-\u0000sw\u0003\u001c"+ - "\u000e\u0000tw\u0003\u0016\u000b\u0000uw\u0003^/\u0000vr\u0001\u0000\u0000"+ - 
"\u0000vs\u0001\u0000\u0000\u0000vt\u0001\u0000\u0000\u0000vu\u0001\u0000"+ - "\u0000\u0000w\u0005\u0001\u0000\u0000\u0000x\u0086\u0003$\u0012\u0000"+ - "y\u0086\u0003(\u0014\u0000z\u0086\u00036\u001b\u0000{\u0086\u0003<\u001e"+ - "\u0000|\u0086\u00038\u001c\u0000}\u0086\u0003&\u0013\u0000~\u0086\u0003"+ - "\b\u0004\u0000\u007f\u0086\u0003>\u001f\u0000\u0080\u0086\u0003@ \u0000"+ - "\u0081\u0086\u0003D\"\u0000\u0082\u0086\u0003F#\u0000\u0083\u0086\u0003"+ - "`0\u0000\u0084\u0086\u0003H$\u0000\u0085x\u0001\u0000\u0000\u0000\u0085"+ - "y\u0001\u0000\u0000\u0000\u0085z\u0001\u0000\u0000\u0000\u0085{\u0001"+ - "\u0000\u0000\u0000\u0085|\u0001\u0000\u0000\u0000\u0085}\u0001\u0000\u0000"+ - "\u0000\u0085~\u0001\u0000\u0000\u0000\u0085\u007f\u0001\u0000\u0000\u0000"+ - "\u0085\u0080\u0001\u0000\u0000\u0000\u0085\u0081\u0001\u0000\u0000\u0000"+ - "\u0085\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000"+ - "\u0085\u0084\u0001\u0000\u0000\u0000\u0086\u0007\u0001\u0000\u0000\u0000"+ - "\u0087\u0088\u0005\u0011\u0000\u0000\u0088\u0089\u0003\n\u0005\u0000\u0089"+ - "\t\u0001\u0000\u0000\u0000\u008a\u008b\u0006\u0005\uffff\uffff\u0000\u008b"+ - "\u008c\u0005+\u0000\u0000\u008c\u00a7\u0003\n\u0005\u0007\u008d\u00a7"+ - "\u0003\u000e\u0007\u0000\u008e\u00a7\u0003\f\u0006\u0000\u008f\u0091\u0003"+ - "\u000e\u0007\u0000\u0090\u0092\u0005+\u0000\u0000\u0091\u0090\u0001\u0000"+ - "\u0000\u0000\u0091\u0092\u0001\u0000\u0000\u0000\u0092\u0093\u0001\u0000"+ - "\u0000\u0000\u0093\u0094\u0005(\u0000\u0000\u0094\u0095\u0005\'\u0000"+ - "\u0000\u0095\u009a\u0003\u000e\u0007\u0000\u0096\u0097\u0005!\u0000\u0000"+ - "\u0097\u0099\u0003\u000e\u0007\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ - "\u0099\u009c\u0001\u0000\u0000\u0000\u009a\u0098\u0001\u0000\u0000\u0000"+ - "\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009d\u0001\u0000\u0000\u0000"+ - "\u009c\u009a\u0001\u0000\u0000\u0000\u009d\u009e\u00051\u0000\u0000\u009e"+ - "\u00a7\u0001\u0000\u0000\u0000\u009f\u00a0\u0003\u000e\u0007\u0000\u00a0"+ - "\u00a2\u0005)\u0000\u0000\u00a1\u00a3\u0005+\u0000\u0000\u00a2\u00a1\u0001"+ - "\u0000\u0000\u0000\u00a2\u00a3\u0001\u0000\u0000\u0000\u00a3\u00a4\u0001"+ - "\u0000\u0000\u0000\u00a4\u00a5\u0005,\u0000\u0000\u00a5\u00a7\u0001\u0000"+ - "\u0000\u0000\u00a6\u008a\u0001\u0000\u0000\u0000\u00a6\u008d\u0001\u0000"+ - "\u0000\u0000\u00a6\u008e\u0001\u0000\u0000\u0000\u00a6\u008f\u0001\u0000"+ - "\u0000\u0000\u00a6\u009f\u0001\u0000\u0000\u0000\u00a7\u00b0\u0001\u0000"+ - "\u0000\u0000\u00a8\u00a9\n\u0004\u0000\u0000\u00a9\u00aa\u0005\u001e\u0000"+ - "\u0000\u00aa\u00af\u0003\n\u0005\u0005\u00ab\u00ac\n\u0003\u0000\u0000"+ - "\u00ac\u00ad\u0005.\u0000\u0000\u00ad\u00af\u0003\n\u0005\u0004\u00ae"+ - "\u00a8\u0001\u0000\u0000\u0000\u00ae\u00ab\u0001\u0000\u0000\u0000\u00af"+ - "\u00b2\u0001\u0000\u0000\u0000\u00b0\u00ae\u0001\u0000\u0000\u0000\u00b0"+ - "\u00b1\u0001\u0000\u0000\u0000\u00b1\u000b\u0001\u0000\u0000\u0000\u00b2"+ - "\u00b0\u0001\u0000\u0000\u0000\u00b3\u00b5\u0003\u000e\u0007\u0000\u00b4"+ - "\u00b6\u0005+\u0000\u0000\u00b5\u00b4\u0001\u0000\u0000\u0000\u00b5\u00b6"+ - "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0005*\u0000\u0000\u00b8\u00b9\u0003V+\u0000\u00b9\u00c2\u0001\u0000"+ - "\u0000\u0000\u00ba\u00bc\u0003\u000e\u0007\u0000\u00bb\u00bd\u0005+\u0000"+ - "\u0000\u00bc\u00bb\u0001\u0000\u0000\u0000\u00bc\u00bd\u0001\u0000\u0000"+ - "\u0000\u00bd\u00be\u0001\u0000\u0000\u0000\u00be\u00bf\u00050\u0000\u0000"+ - 
"\u00bf\u00c0\u0003V+\u0000\u00c0\u00c2\u0001\u0000\u0000\u0000\u00c1\u00b3"+ - "\u0001\u0000\u0000\u0000\u00c1\u00ba\u0001\u0000\u0000\u0000\u00c2\r\u0001"+ - "\u0000\u0000\u0000\u00c3\u00c9\u0003\u0010\b\u0000\u00c4\u00c5\u0003\u0010"+ - "\b\u0000\u00c5\u00c6\u0003X,\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7"+ - "\u00c9\u0001\u0000\u0000\u0000\u00c8\u00c3\u0001\u0000\u0000\u0000\u00c8"+ - "\u00c4\u0001\u0000\u0000\u0000\u00c9\u000f\u0001\u0000\u0000\u0000\u00ca"+ - "\u00cb\u0006\b\uffff\uffff\u0000\u00cb\u00cf\u0003\u0012\t\u0000\u00cc"+ - "\u00cd\u0007\u0000\u0000\u0000\u00cd\u00cf\u0003\u0010\b\u0003\u00ce\u00ca"+ - "\u0001\u0000\u0000\u0000\u00ce\u00cc\u0001\u0000\u0000\u0000\u00cf\u00d8"+ - "\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0002\u0000\u0000\u00d1\u00d2\u0007"+ - "\u0001\u0000\u0000\u00d2\u00d7\u0003\u0010\b\u0003\u00d3\u00d4\n\u0001"+ - "\u0000\u0000\u00d4\u00d5\u0007\u0000\u0000\u0000\u00d5\u00d7\u0003\u0010"+ - "\b\u0002\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000"+ - "\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u0011\u0001\u0000\u0000"+ - "\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00e3\u00034\u001a\u0000"+ - "\u00dc\u00e3\u0003,\u0016\u0000\u00dd\u00e3\u0003\u0014\n\u0000\u00de"+ - "\u00df\u0005\'\u0000\u0000\u00df\u00e0\u0003\n\u0005\u0000\u00e0\u00e1"+ - "\u00051\u0000\u0000\u00e1\u00e3\u0001\u0000\u0000\u0000\u00e2\u00db\u0001"+ - "\u0000\u0000\u0000\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00dd\u0001"+ - "\u0000\u0000\u0000\u00e2\u00de\u0001\u0000\u0000\u0000\u00e3\u0013\u0001"+ - "\u0000\u0000\u0000\u00e4\u00e5\u00030\u0018\u0000\u00e5\u00ef\u0005\'"+ - "\u0000\u0000\u00e6\u00f0\u0005<\u0000\u0000\u00e7\u00ec\u0003\n\u0005"+ - "\u0000\u00e8\u00e9\u0005!\u0000\u0000\u00e9\u00eb\u0003\n\u0005\u0000"+ - "\u00ea\u00e8\u0001\u0000\u0000\u0000\u00eb\u00ee\u0001\u0000\u0000\u0000"+ - "\u00ec\u00ea\u0001\u0000\u0000\u0000\u00ec\u00ed\u0001\u0000\u0000\u0000"+ - "\u00ed\u00f0\u0001\u0000\u0000\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000"+ - "\u00ef\u00e6\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000"+ - "\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0001\u0000\u0000\u0000"+ - "\u00f1\u00f2\u00051\u0000\u0000\u00f2\u0015\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f4\u0005\r\u0000\u0000\u00f4\u00f5\u0003\u0018\f\u0000\u00f5\u0017"+ - "\u0001\u0000\u0000\u0000\u00f6\u00fb\u0003\u001a\r\u0000\u00f7\u00f8\u0005"+ - "!\u0000\u0000\u00f8\u00fa\u0003\u001a\r\u0000\u00f9\u00f7\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0019\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u0104\u0003\n\u0005"+ - "\u0000\u00ff\u0100\u0003,\u0016\u0000\u0100\u0101\u0005 \u0000\u0000\u0101"+ - "\u0102\u0003\n\u0005\u0000\u0102\u0104\u0001\u0000\u0000\u0000\u0103\u00fe"+ - "\u0001\u0000\u0000\u0000\u0103\u00ff\u0001\u0000\u0000\u0000\u0104\u001b"+ - "\u0001\u0000\u0000\u0000\u0105\u0106\u0005\u0006\u0000\u0000\u0106\u010b"+ - "\u0003*\u0015\u0000\u0107\u0108\u0005!\u0000\u0000\u0108\u010a\u0003*"+ - "\u0015\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u010a\u010d\u0001\u0000"+ - "\u0000\u0000\u010b\u0109\u0001\u0000\u0000\u0000\u010b\u010c\u0001\u0000"+ - "\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001\u0000"+ - "\u0000\u0000\u010e\u0110\u0003\u001e\u000f\u0000\u010f\u010e\u0001\u0000"+ - 
"\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001d\u0001\u0000"+ - "\u0000\u0000\u0111\u0114\u0003 \u0010\u0000\u0112\u0114\u0003\"\u0011"+ - "\u0000\u0113\u0111\u0001\u0000\u0000\u0000\u0113\u0112\u0001\u0000\u0000"+ - "\u0000\u0114\u001f\u0001\u0000\u0000\u0000\u0115\u0116\u0005F\u0000\u0000"+ - "\u0116\u011b\u0003*\u0015\u0000\u0117\u0118\u0005!\u0000\u0000\u0118\u011a"+ - "\u0003*\u0015\u0000\u0119\u0117\u0001\u0000\u0000\u0000\u011a\u011d\u0001"+ - "\u0000\u0000\u0000\u011b\u0119\u0001\u0000\u0000\u0000\u011b\u011c\u0001"+ - "\u0000\u0000\u0000\u011c!\u0001\u0000\u0000\u0000\u011d\u011b\u0001\u0000"+ - "\u0000\u0000\u011e\u011f\u0005?\u0000\u0000\u011f\u0120\u0003 \u0010\u0000"+ - "\u0120\u0121\u0005@\u0000\u0000\u0121#\u0001\u0000\u0000\u0000\u0122\u0123"+ - "\u0005\u0004\u0000\u0000\u0123\u0124\u0003\u0018\f\u0000\u0124%\u0001"+ - "\u0000\u0000\u0000\u0125\u0127\u0005\u0010\u0000\u0000\u0126\u0128\u0003"+ - "\u0018\f\u0000\u0127\u0126\u0001\u0000\u0000\u0000\u0127\u0128\u0001\u0000"+ - "\u0000\u0000\u0128\u012b\u0001\u0000\u0000\u0000\u0129\u012a\u0005\u001d"+ - "\u0000\u0000\u012a\u012c\u0003\u0018\f\u0000\u012b\u0129\u0001\u0000\u0000"+ - "\u0000\u012b\u012c\u0001\u0000\u0000\u0000\u012c\'\u0001\u0000\u0000\u0000"+ - "\u012d\u012e\u0005\b\u0000\u0000\u012e\u0131\u0003\u0018\f\u0000\u012f"+ - "\u0130\u0005\u001d\u0000\u0000\u0130\u0132\u0003\u0018\f\u0000\u0131\u012f"+ - "\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132)\u0001"+ - "\u0000\u0000\u0000\u0133\u0134\u0007\u0002\u0000\u0000\u0134+\u0001\u0000"+ - "\u0000\u0000\u0135\u013a\u00030\u0018\u0000\u0136\u0137\u0005#\u0000\u0000"+ - "\u0137\u0139\u00030\u0018\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0139"+ - "\u013c\u0001\u0000\u0000\u0000\u013a\u0138\u0001\u0000\u0000\u0000\u013a"+ - "\u013b\u0001\u0000\u0000\u0000\u013b-\u0001\u0000\u0000\u0000\u013c\u013a"+ - "\u0001\u0000\u0000\u0000\u013d\u0142\u00032\u0019\u0000\u013e\u013f\u0005"+ - "#\u0000\u0000\u013f\u0141\u00032\u0019\u0000\u0140\u013e\u0001\u0000\u0000"+ - "\u0000\u0141\u0144\u0001\u0000\u0000\u0000\u0142\u0140\u0001\u0000\u0000"+ - "\u0000\u0142\u0143\u0001\u0000\u0000\u0000\u0143/\u0001\u0000\u0000\u0000"+ - "\u0144\u0142\u0001\u0000\u0000\u0000\u0145\u0146\u0007\u0003\u0000\u0000"+ - "\u01461\u0001\u0000\u0000\u0000\u0147\u0148\u0005K\u0000\u0000\u01483"+ - "\u0001\u0000\u0000\u0000\u0149\u0174\u0005,\u0000\u0000\u014a\u014b\u0003"+ - "T*\u0000\u014b\u014c\u0005A\u0000\u0000\u014c\u0174\u0001\u0000\u0000"+ - "\u0000\u014d\u0174\u0003R)\u0000\u014e\u0174\u0003T*\u0000\u014f\u0174"+ - "\u0003N\'\u0000\u0150\u0174\u0005/\u0000\u0000\u0151\u0174\u0003V+\u0000"+ - "\u0152\u0153\u0005?\u0000\u0000\u0153\u0158\u0003P(\u0000\u0154\u0155"+ - "\u0005!\u0000\u0000\u0155\u0157\u0003P(\u0000\u0156\u0154\u0001\u0000"+ - "\u0000\u0000\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000"+ - "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015b\u0001\u0000"+ - "\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u015c\u0005@\u0000"+ - "\u0000\u015c\u0174\u0001\u0000\u0000\u0000\u015d\u015e\u0005?\u0000\u0000"+ - "\u015e\u0163\u0003N\'\u0000\u015f\u0160\u0005!\u0000\u0000\u0160\u0162"+ - "\u0003N\'\u0000\u0161\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001"+ - "\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001"+ - "\u0000\u0000\u0000\u0164\u0166\u0001\u0000\u0000\u0000\u0165\u0163\u0001"+ - "\u0000\u0000\u0000\u0166\u0167\u0005@\u0000\u0000\u0167\u0174\u0001\u0000"+ - 
"\u0000\u0000\u0168\u0169\u0005?\u0000\u0000\u0169\u016e\u0003V+\u0000"+ - "\u016a\u016b\u0005!\u0000\u0000\u016b\u016d\u0003V+\u0000\u016c\u016a"+ - "\u0001\u0000\u0000\u0000\u016d\u0170\u0001\u0000\u0000\u0000\u016e\u016c"+ - "\u0001\u0000\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0171"+ - "\u0001\u0000\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0172"+ - "\u0005@\u0000\u0000\u0172\u0174\u0001\u0000\u0000\u0000\u0173\u0149\u0001"+ - "\u0000\u0000\u0000\u0173\u014a\u0001\u0000\u0000\u0000\u0173\u014d\u0001"+ - "\u0000\u0000\u0000\u0173\u014e\u0001\u0000\u0000\u0000\u0173\u014f\u0001"+ - "\u0000\u0000\u0000\u0173\u0150\u0001\u0000\u0000\u0000\u0173\u0151\u0001"+ - "\u0000\u0000\u0000\u0173\u0152\u0001\u0000\u0000\u0000\u0173\u015d\u0001"+ - "\u0000\u0000\u0000\u0173\u0168\u0001\u0000\u0000\u0000\u01745\u0001\u0000"+ - "\u0000\u0000\u0175\u0176\u0005\n\u0000\u0000\u0176\u0177\u0005\u001b\u0000"+ - "\u0000\u01777\u0001\u0000\u0000\u0000\u0178\u0179\u0005\u000f\u0000\u0000"+ - "\u0179\u017e\u0003:\u001d\u0000\u017a\u017b\u0005!\u0000\u0000\u017b\u017d"+ - "\u0003:\u001d\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u0180\u0001"+ - "\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017e\u017f\u0001"+ - "\u0000\u0000\u0000\u017f9\u0001\u0000\u0000\u0000\u0180\u017e\u0001\u0000"+ - "\u0000\u0000\u0181\u0183\u0003\n\u0005\u0000\u0182\u0184\u0007\u0004\u0000"+ - "\u0000\u0183\u0182\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000"+ - "\u0000\u0184\u0187\u0001\u0000\u0000\u0000\u0185\u0186\u0005-\u0000\u0000"+ - "\u0186\u0188\u0007\u0005\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000"+ - "\u0187\u0188\u0001\u0000\u0000\u0000\u0188;\u0001\u0000\u0000\u0000\u0189"+ - "\u018a\u0005\t\u0000\u0000\u018a\u018f\u0003.\u0017\u0000\u018b\u018c"+ - "\u0005!\u0000\u0000\u018c\u018e\u0003.\u0017\u0000\u018d\u018b\u0001\u0000"+ - "\u0000\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u018d\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u0190=\u0001\u0000\u0000"+ - "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0193\u0005\u0002\u0000"+ - "\u0000\u0193\u0198\u0003.\u0017\u0000\u0194\u0195\u0005!\u0000\u0000\u0195"+ - "\u0197\u0003.\u0017\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019a"+ - "\u0001\u0000\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0198\u0199"+ - "\u0001\u0000\u0000\u0000\u0199?\u0001\u0000\u0000\u0000\u019a\u0198\u0001"+ - "\u0000\u0000\u0000\u019b\u019c\u0005\f\u0000\u0000\u019c\u01a1\u0003B"+ - "!\u0000\u019d\u019e\u0005!\u0000\u0000\u019e\u01a0\u0003B!\u0000\u019f"+ - "\u019d\u0001\u0000\u0000\u0000\u01a0\u01a3\u0001\u0000\u0000\u0000\u01a1"+ - "\u019f\u0001\u0000\u0000\u0000\u01a1\u01a2\u0001\u0000\u0000\u0000\u01a2"+ - "A\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a5"+ - "\u0003.\u0017\u0000\u01a5\u01a6\u0005O\u0000\u0000\u01a6\u01a7\u0003."+ - "\u0017\u0000\u01a7C\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005\u0001\u0000"+ - "\u0000\u01a9\u01aa\u0003\u0012\t\u0000\u01aa\u01ac\u0003V+\u0000\u01ab"+ - "\u01ad\u0003J%\u0000\u01ac\u01ab\u0001\u0000\u0000\u0000\u01ac\u01ad\u0001"+ - "\u0000\u0000\u0000\u01adE\u0001\u0000\u0000\u0000\u01ae\u01af\u0005\u0007"+ - "\u0000\u0000\u01af\u01b0\u0003\u0012\t\u0000\u01b0\u01b1\u0003V+\u0000"+ - "\u01b1G\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005\u000b\u0000\u0000\u01b3"+ - "\u01b4\u0003,\u0016\u0000\u01b4I\u0001\u0000\u0000\u0000\u01b5\u01ba\u0003"+ - "L&\u0000\u01b6\u01b7\u0005!\u0000\u0000\u01b7\u01b9\u0003L&\u0000\u01b8"+ - 
"\u01b6\u0001\u0000\u0000\u0000\u01b9\u01bc\u0001\u0000\u0000\u0000\u01ba"+ - "\u01b8\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb"+ - "K\u0001\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01be"+ - "\u00030\u0018\u0000\u01be\u01bf\u0005 \u0000\u0000\u01bf\u01c0\u00034"+ - "\u001a\u0000\u01c0M\u0001\u0000\u0000\u0000\u01c1\u01c2\u0007\u0006\u0000"+ - "\u0000\u01c2O\u0001\u0000\u0000\u0000\u01c3\u01c6\u0003R)\u0000\u01c4"+ - "\u01c6\u0003T*\u0000\u01c5\u01c3\u0001\u0000\u0000\u0000\u01c5\u01c4\u0001"+ - "\u0000\u0000\u0000\u01c6Q\u0001\u0000\u0000\u0000\u01c7\u01c9\u0007\u0000"+ - "\u0000\u0000\u01c8\u01c7\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca\u01cb\u0005\u001c"+ - "\u0000\u0000\u01cbS\u0001\u0000\u0000\u0000\u01cc\u01ce\u0007\u0000\u0000"+ - "\u0000\u01cd\u01cc\u0001\u0000\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ - "\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005\u001b\u0000"+ - "\u0000\u01d0U\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005\u001a\u0000\u0000"+ - "\u01d2W\u0001\u0000\u0000\u0000\u01d3\u01d4\u0007\u0007\u0000\u0000\u01d4"+ - "Y\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005\u0005\u0000\u0000\u01d6\u01d7"+ - "\u0003\\.\u0000\u01d7[\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005?\u0000"+ - "\u0000\u01d9\u01da\u0003\u0002\u0001\u0000\u01da\u01db\u0005@\u0000\u0000"+ - "\u01db]\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000\u01dd"+ - "\u01e1\u0005_\u0000\u0000\u01de\u01df\u0005\u000e\u0000\u0000\u01df\u01e1"+ - "\u0005`\u0000\u0000\u01e0\u01dc\u0001\u0000\u0000\u0000\u01e0\u01de\u0001"+ - "\u0000\u0000\u0000\u01e1_\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\u0003"+ - "\u0000\u0000\u01e3\u01e6\u0005U\u0000\u0000\u01e4\u01e5\u0005S\u0000\u0000"+ - "\u01e5\u01e7\u0003.\u0017\u0000\u01e6\u01e4\u0001\u0000\u0000\u0000\u01e6"+ - "\u01e7\u0001\u0000\u0000\u0000\u01e7\u01f1\u0001\u0000\u0000\u0000\u01e8"+ - "\u01e9\u0005T\u0000\u0000\u01e9\u01ee\u0003b1\u0000\u01ea\u01eb\u0005"+ - "!\u0000\u0000\u01eb\u01ed\u0003b1\u0000\u01ec\u01ea\u0001\u0000\u0000"+ - "\u0000\u01ed\u01f0\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000"+ - "\u0000\u01ee\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f2\u0001\u0000\u0000"+ - "\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000\u01f1\u01e8\u0001\u0000\u0000"+ - "\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2a\u0001\u0000\u0000\u0000"+ - "\u01f3\u01f4\u0003.\u0017\u0000\u01f4\u01f5\u0005 \u0000\u0000\u01f5\u01f7"+ - "\u0001\u0000\u0000\u0000\u01f6\u01f3\u0001\u0000\u0000\u0000\u01f6\u01f7"+ - "\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9"+ - "\u0003.\u0017\u0000\u01f9c\u0001\u0000\u0000\u00002ov\u0085\u0091\u009a"+ - "\u00a2\u00a6\u00ae\u00b0\u00b5\u00bc\u00c1\u00c8\u00ce\u00d6\u00d8\u00e2"+ - "\u00ec\u00ef\u00fb\u0103\u010b\u010f\u0113\u011b\u0127\u012b\u0131\u013a"+ - "\u0142\u0158\u0163\u016e\u0173\u017e\u0183\u0187\u018f\u0198\u01a1\u01ac"+ - "\u01ba\u01c5\u01c8\u01cd\u01e0\u01e6\u01ee\u01f1\u01f6"; + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bd\u0000\b\u0001\u0000;<\u0001"+ + "\u0000=?\u0002\u0000CCHH\u0001\u0000BC\u0002\u0000 ##\u0001\u0000&\'"+ + "\u0002\u0000%%33\u0002\u0000446:\u0217\u0000f\u0001\u0000\u0000\u0000"+ + "\u0002i\u0001\u0000\u0000\u0000\u0004y\u0001\u0000\u0000\u0000\u0006\u0088"+ + "\u0001\u0000\u0000\u0000\b\u008a\u0001\u0000\u0000\u0000\n\u00a9\u0001"+ + "\u0000\u0000\u0000\f\u00c4\u0001\u0000\u0000\u0000\u000e\u00cb\u0001\u0000"+ + 
"\u0000\u0000\u0010\u00d1\u0001\u0000\u0000\u0000\u0012\u00e5\u0001\u0000"+ + "\u0000\u0000\u0014\u00e7\u0001\u0000\u0000\u0000\u0016\u00f6\u0001\u0000"+ + "\u0000\u0000\u0018\u00f9\u0001\u0000\u0000\u0000\u001a\u0106\u0001\u0000"+ + "\u0000\u0000\u001c\u0108\u0001\u0000\u0000\u0000\u001e\u0116\u0001\u0000"+ + "\u0000\u0000 \u0118\u0001\u0000\u0000\u0000\"\u0121\u0001\u0000\u0000"+ + "\u0000$\u0125\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000("+ + "\u0130\u0001\u0000\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u0138\u0001"+ + "\u0000\u0000\u0000.\u0140\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000"+ + "\u00002\u014a\u0001\u0000\u0000\u00004\u0176\u0001\u0000\u0000\u00006"+ + "\u0178\u0001\u0000\u0000\u00008\u017b\u0001\u0000\u0000\u0000:\u0184\u0001"+ + "\u0000\u0000\u0000<\u018c\u0001\u0000\u0000\u0000>\u0195\u0001\u0000\u0000"+ + "\u0000@\u019e\u0001\u0000\u0000\u0000B\u01a7\u0001\u0000\u0000\u0000D"+ + "\u01ab\u0001\u0000\u0000\u0000F\u01b1\u0001\u0000\u0000\u0000H\u01b5\u0001"+ + "\u0000\u0000\u0000J\u01b8\u0001\u0000\u0000\u0000L\u01c0\u0001\u0000\u0000"+ + "\u0000N\u01c4\u0001\u0000\u0000\u0000P\u01c8\u0001\u0000\u0000\u0000R"+ + "\u01cb\u0001\u0000\u0000\u0000T\u01d0\u0001\u0000\u0000\u0000V\u01d4\u0001"+ + "\u0000\u0000\u0000X\u01d6\u0001\u0000\u0000\u0000Z\u01d8\u0001\u0000\u0000"+ + "\u0000\\\u01db\u0001\u0000\u0000\u0000^\u01df\u0001\u0000\u0000\u0000"+ + "`\u01e2\u0001\u0000\u0000\u0000b\u01e5\u0001\u0000\u0000\u0000d\u01f9"+ + "\u0001\u0000\u0000\u0000fg\u0003\u0002\u0001\u0000gh\u0005\u0000\u0000"+ + "\u0001h\u0001\u0001\u0000\u0000\u0000ij\u0006\u0001\uffff\uffff\u0000"+ + "jk\u0003\u0004\u0002\u0000kq\u0001\u0000\u0000\u0000lm\n\u0001\u0000\u0000"+ + "mn\u0005\u001a\u0000\u0000np\u0003\u0006\u0003\u0000ol\u0001\u0000\u0000"+ + "\u0000ps\u0001\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000qr\u0001\u0000"+ + "\u0000\u0000r\u0003\u0001\u0000\u0000\u0000sq\u0001\u0000\u0000\u0000"+ + "tz\u0003Z-\u0000uz\u0003\u001c\u000e\u0000vz\u0003\u0016\u000b\u0000w"+ + "z\u0003^/\u0000xz\u0003`0\u0000yt\u0001\u0000\u0000\u0000yu\u0001\u0000"+ + "\u0000\u0000yv\u0001\u0000\u0000\u0000yw\u0001\u0000\u0000\u0000yx\u0001"+ + "\u0000\u0000\u0000z\u0005\u0001\u0000\u0000\u0000{\u0089\u0003$\u0012"+ + "\u0000|\u0089\u0003(\u0014\u0000}\u0089\u00036\u001b\u0000~\u0089\u0003"+ + "<\u001e\u0000\u007f\u0089\u00038\u001c\u0000\u0080\u0089\u0003&\u0013"+ + "\u0000\u0081\u0089\u0003\b\u0004\u0000\u0082\u0089\u0003>\u001f\u0000"+ + "\u0083\u0089\u0003@ \u0000\u0084\u0089\u0003D\"\u0000\u0085\u0089\u0003"+ + "F#\u0000\u0086\u0089\u0003b1\u0000\u0087\u0089\u0003H$\u0000\u0088{\u0001"+ + "\u0000\u0000\u0000\u0088|\u0001\u0000\u0000\u0000\u0088}\u0001\u0000\u0000"+ + "\u0000\u0088~\u0001\u0000\u0000\u0000\u0088\u007f\u0001\u0000\u0000\u0000"+ + "\u0088\u0080\u0001\u0000\u0000\u0000\u0088\u0081\u0001\u0000\u0000\u0000"+ + "\u0088\u0082\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000"+ + "\u0088\u0084\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000\u0000\u0000"+ + "\u0088\u0086\u0001\u0000\u0000\u0000\u0088\u0087\u0001\u0000\u0000\u0000"+ + "\u0089\u0007\u0001\u0000\u0000\u0000\u008a\u008b\u0005\u0012\u0000\u0000"+ + "\u008b\u008c\u0003\n\u0005\u0000\u008c\t\u0001\u0000\u0000\u0000\u008d"+ + "\u008e\u0006\u0005\uffff\uffff\u0000\u008e\u008f\u0005,\u0000\u0000\u008f"+ + "\u00aa\u0003\n\u0005\u0007\u0090\u00aa\u0003\u000e\u0007\u0000\u0091\u00aa"+ + "\u0003\f\u0006\u0000\u0092\u0094\u0003\u000e\u0007\u0000\u0093\u0095\u0005"+ + 
",\u0000\u0000\u0094\u0093\u0001\u0000\u0000\u0000\u0094\u0095\u0001\u0000"+ + "\u0000\u0000\u0095\u0096\u0001\u0000\u0000\u0000\u0096\u0097\u0005)\u0000"+ + "\u0000\u0097\u0098\u0005(\u0000\u0000\u0098\u009d\u0003\u000e\u0007\u0000"+ + "\u0099\u009a\u0005\"\u0000\u0000\u009a\u009c\u0003\u000e\u0007\u0000\u009b"+ + "\u0099\u0001\u0000\u0000\u0000\u009c\u009f\u0001\u0000\u0000\u0000\u009d"+ + "\u009b\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e"+ + "\u00a0\u0001\u0000\u0000\u0000\u009f\u009d\u0001\u0000\u0000\u0000\u00a0"+ + "\u00a1\u00052\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000\u00a2\u00a3"+ + "\u0003\u000e\u0007\u0000\u00a3\u00a5\u0005*\u0000\u0000\u00a4\u00a6\u0005"+ + ",\u0000\u0000\u00a5\u00a4\u0001\u0000\u0000\u0000\u00a5\u00a6\u0001\u0000"+ + "\u0000\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a8\u0005-\u0000"+ + "\u0000\u00a8\u00aa\u0001\u0000\u0000\u0000\u00a9\u008d\u0001\u0000\u0000"+ + "\u0000\u00a9\u0090\u0001\u0000\u0000\u0000\u00a9\u0091\u0001\u0000\u0000"+ + "\u0000\u00a9\u0092\u0001\u0000\u0000\u0000\u00a9\u00a2\u0001\u0000\u0000"+ + "\u0000\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\n\u0004\u0000\u0000"+ + "\u00ac\u00ad\u0005\u001f\u0000\u0000\u00ad\u00b2\u0003\n\u0005\u0005\u00ae"+ + "\u00af\n\u0003\u0000\u0000\u00af\u00b0\u0005/\u0000\u0000\u00b0\u00b2"+ + "\u0003\n\u0005\u0004\u00b1\u00ab\u0001\u0000\u0000\u0000\u00b1\u00ae\u0001"+ + "\u0000\u0000\u0000\u00b2\u00b5\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001"+ + "\u0000\u0000\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u000b\u0001"+ + "\u0000\u0000\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b8\u0003"+ + "\u000e\u0007\u0000\u00b7\u00b9\u0005,\u0000\u0000\u00b8\u00b7\u0001\u0000"+ + "\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000"+ + "\u0000\u0000\u00ba\u00bb\u0005+\u0000\u0000\u00bb\u00bc\u0003V+\u0000"+ + "\u00bc\u00c5\u0001\u0000\u0000\u0000\u00bd\u00bf\u0003\u000e\u0007\u0000"+ + "\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf"+ + "\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1"+ + "\u00c2\u00051\u0000\u0000\u00c2\u00c3\u0003V+\u0000\u00c3\u00c5\u0001"+ + "\u0000\u0000\u0000\u00c4\u00b6\u0001\u0000\u0000\u0000\u00c4\u00bd\u0001"+ + "\u0000\u0000\u0000\u00c5\r\u0001\u0000\u0000\u0000\u00c6\u00cc\u0003\u0010"+ + "\b\u0000\u00c7\u00c8\u0003\u0010\b\u0000\u00c8\u00c9\u0003X,\u0000\u00c9"+ + "\u00ca\u0003\u0010\b\u0000\u00ca\u00cc\u0001\u0000\u0000\u0000\u00cb\u00c6"+ + "\u0001\u0000\u0000\u0000\u00cb\u00c7\u0001\u0000\u0000\u0000\u00cc\u000f"+ + "\u0001\u0000\u0000\u0000\u00cd\u00ce\u0006\b\uffff\uffff\u0000\u00ce\u00d2"+ + "\u0003\u0012\t\u0000\u00cf\u00d0\u0007\u0000\u0000\u0000\u00d0\u00d2\u0003"+ + "\u0010\b\u0003\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d1\u00cf\u0001\u0000"+ + "\u0000\u0000\u00d2\u00db\u0001\u0000\u0000\u0000\u00d3\u00d4\n\u0002\u0000"+ + "\u0000\u00d4\u00d5\u0007\u0001\u0000\u0000\u00d5\u00da\u0003\u0010\b\u0003"+ + "\u00d6\u00d7\n\u0001\u0000\u0000\u00d7\u00d8\u0007\u0000\u0000\u0000\u00d8"+ + "\u00da\u0003\u0010\b\u0002\u00d9\u00d3\u0001\u0000\u0000\u0000\u00d9\u00d6"+ + "\u0001\u0000\u0000\u0000\u00da\u00dd\u0001\u0000\u0000\u0000\u00db\u00d9"+ + "\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000\u00dc\u0011"+ + "\u0001\u0000\u0000\u0000\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e6"+ + "\u00034\u001a\u0000\u00df\u00e6\u0003,\u0016\u0000\u00e0\u00e6\u0003\u0014"+ + "\n\u0000\u00e1\u00e2\u0005(\u0000\u0000\u00e2\u00e3\u0003\n\u0005\u0000"+ + 
"\u00e3\u00e4\u00052\u0000\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5"+ + "\u00de\u0001\u0000\u0000\u0000\u00e5\u00df\u0001\u0000\u0000\u0000\u00e5"+ + "\u00e0\u0001\u0000\u0000\u0000\u00e5\u00e1\u0001\u0000\u0000\u0000\u00e6"+ + "\u0013\u0001\u0000\u0000\u0000\u00e7\u00e8\u00030\u0018\u0000\u00e8\u00f2"+ + "\u0005(\u0000\u0000\u00e9\u00f3\u0005=\u0000\u0000\u00ea\u00ef\u0003\n"+ + "\u0005\u0000\u00eb\u00ec\u0005\"\u0000\u0000\u00ec\u00ee\u0003\n\u0005"+ + "\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f1\u0001\u0000\u0000"+ + "\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000"+ + "\u0000\u00f0\u00f3\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000"+ + "\u0000\u00f2\u00e9\u0001\u0000\u0000\u0000\u00f2\u00ea\u0001\u0000\u0000"+ + "\u0000\u00f2\u00f3\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001\u0000\u0000"+ + "\u0000\u00f4\u00f5\u00052\u0000\u0000\u00f5\u0015\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f7\u0005\u000e\u0000\u0000\u00f7\u00f8\u0003\u0018\f\u0000\u00f8"+ + "\u0017\u0001\u0000\u0000\u0000\u00f9\u00fe\u0003\u001a\r\u0000\u00fa\u00fb"+ + "\u0005\"\u0000\u0000\u00fb\u00fd\u0003\u001a\r\u0000\u00fc\u00fa\u0001"+ + "\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001"+ + "\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000\u0000\u00ff\u0019\u0001"+ + "\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000\u0000\u0101\u0107\u0003"+ + "\n\u0005\u0000\u0102\u0103\u0003,\u0016\u0000\u0103\u0104\u0005!\u0000"+ + "\u0000\u0104\u0105\u0003\n\u0005\u0000\u0105\u0107\u0001\u0000\u0000\u0000"+ + "\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000"+ + "\u0107\u001b\u0001\u0000\u0000\u0000\u0108\u0109\u0005\u0006\u0000\u0000"+ + "\u0109\u010e\u0003*\u0015\u0000\u010a\u010b\u0005\"\u0000\u0000\u010b"+ + "\u010d\u0003*\u0015\u0000\u010c\u010a\u0001\u0000\u0000\u0000\u010d\u0110"+ + "\u0001\u0000\u0000\u0000\u010e\u010c\u0001\u0000\u0000\u0000\u010e\u010f"+ + "\u0001\u0000\u0000\u0000\u010f\u0112\u0001\u0000\u0000\u0000\u0110\u010e"+ + "\u0001\u0000\u0000\u0000\u0111\u0113\u0003\u001e\u000f\u0000\u0112\u0111"+ + "\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001d"+ + "\u0001\u0000\u0000\u0000\u0114\u0117\u0003 \u0010\u0000\u0115\u0117\u0003"+ + "\"\u0011\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0115\u0001\u0000"+ + "\u0000\u0000\u0117\u001f\u0001\u0000\u0000\u0000\u0118\u0119\u0005G\u0000"+ + "\u0000\u0119\u011e\u0003*\u0015\u0000\u011a\u011b\u0005\"\u0000\u0000"+ + "\u011b\u011d\u0003*\u0015\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d"+ + "\u0120\u0001\u0000\u0000\u0000\u011e\u011c\u0001\u0000\u0000\u0000\u011e"+ + "\u011f\u0001\u0000\u0000\u0000\u011f!\u0001\u0000\u0000\u0000\u0120\u011e"+ + "\u0001\u0000\u0000\u0000\u0121\u0122\u0005@\u0000\u0000\u0122\u0123\u0003"+ + " \u0010\u0000\u0123\u0124\u0005A\u0000\u0000\u0124#\u0001\u0000\u0000"+ + "\u0000\u0125\u0126\u0005\u0004\u0000\u0000\u0126\u0127\u0003\u0018\f\u0000"+ + "\u0127%\u0001\u0000\u0000\u0000\u0128\u012a\u0005\u0011\u0000\u0000\u0129"+ + "\u012b\u0003\u0018\f\u0000\u012a\u0129\u0001\u0000\u0000\u0000\u012a\u012b"+ + "\u0001\u0000\u0000\u0000\u012b\u012e\u0001\u0000\u0000\u0000\u012c\u012d"+ + "\u0005\u001e\u0000\u0000\u012d\u012f\u0003\u0018\f\u0000\u012e\u012c\u0001"+ + "\u0000\u0000\u0000\u012e\u012f\u0001\u0000\u0000\u0000\u012f\'\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0005\b\u0000\u0000\u0131\u0134\u0003\u0018\f"+ + "\u0000\u0132\u0133\u0005\u001e\u0000\u0000\u0133\u0135\u0003\u0018\f\u0000"+ + 
"\u0134\u0132\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000"+ + "\u0135)\u0001\u0000\u0000\u0000\u0136\u0137\u0007\u0002\u0000\u0000\u0137"+ + "+\u0001\u0000\u0000\u0000\u0138\u013d\u00030\u0018\u0000\u0139\u013a\u0005"+ + "$\u0000\u0000\u013a\u013c\u00030\u0018\u0000\u013b\u0139\u0001\u0000\u0000"+ + "\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000"+ + "\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e-\u0001\u0000\u0000\u0000"+ + "\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0145\u00032\u0019\u0000\u0141"+ + "\u0142\u0005$\u0000\u0000\u0142\u0144\u00032\u0019\u0000\u0143\u0141\u0001"+ + "\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143\u0001"+ + "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146/\u0001\u0000"+ + "\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u0149\u0007\u0003"+ + "\u0000\u0000\u01491\u0001\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000"+ + "\u014b3\u0001\u0000\u0000\u0000\u014c\u0177\u0005-\u0000\u0000\u014d\u014e"+ + "\u0003T*\u0000\u014e\u014f\u0005B\u0000\u0000\u014f\u0177\u0001\u0000"+ + "\u0000\u0000\u0150\u0177\u0003R)\u0000\u0151\u0177\u0003T*\u0000\u0152"+ + "\u0177\u0003N\'\u0000\u0153\u0177\u00050\u0000\u0000\u0154\u0177\u0003"+ + "V+\u0000\u0155\u0156\u0005@\u0000\u0000\u0156\u015b\u0003P(\u0000\u0157"+ + "\u0158\u0005\"\u0000\u0000\u0158\u015a\u0003P(\u0000\u0159\u0157\u0001"+ + "\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000\u0000\u015b\u0159\u0001"+ + "\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000\u0000\u015c\u015e\u0001"+ + "\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u015f\u0005"+ + "A\u0000\u0000\u015f\u0177\u0001\u0000\u0000\u0000\u0160\u0161\u0005@\u0000"+ + "\u0000\u0161\u0166\u0003N\'\u0000\u0162\u0163\u0005\"\u0000\u0000\u0163"+ + "\u0165\u0003N\'\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0165\u0168"+ + "\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0166\u0167"+ + "\u0001\u0000\u0000\u0000\u0167\u0169\u0001\u0000\u0000\u0000\u0168\u0166"+ + "\u0001\u0000\u0000\u0000\u0169\u016a\u0005A\u0000\u0000\u016a\u0177\u0001"+ + "\u0000\u0000\u0000\u016b\u016c\u0005@\u0000\u0000\u016c\u0171\u0003V+"+ + "\u0000\u016d\u016e\u0005\"\u0000\u0000\u016e\u0170\u0003V+\u0000\u016f"+ + "\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000\u0000\u0171"+ + "\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000\u0172"+ + "\u0174\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000\u0174"+ + "\u0175\u0005A\u0000\u0000\u0175\u0177\u0001\u0000\u0000\u0000\u0176\u014c"+ + "\u0001\u0000\u0000\u0000\u0176\u014d\u0001\u0000\u0000\u0000\u0176\u0150"+ + "\u0001\u0000\u0000\u0000\u0176\u0151\u0001\u0000\u0000\u0000\u0176\u0152"+ + "\u0001\u0000\u0000\u0000\u0176\u0153\u0001\u0000\u0000\u0000\u0176\u0154"+ + "\u0001\u0000\u0000\u0000\u0176\u0155\u0001\u0000\u0000\u0000\u0176\u0160"+ + "\u0001\u0000\u0000\u0000\u0176\u016b\u0001\u0000\u0000\u0000\u01775\u0001"+ + "\u0000\u0000\u0000\u0178\u0179\u0005\n\u0000\u0000\u0179\u017a\u0005\u001c"+ + "\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b\u017c\u0005\u0010\u0000"+ + "\u0000\u017c\u0181\u0003:\u001d\u0000\u017d\u017e\u0005\"\u0000\u0000"+ + "\u017e\u0180\u0003:\u001d\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180"+ + "\u0183\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0181"+ + "\u0182\u0001\u0000\u0000\u0000\u01829\u0001\u0000\u0000\u0000\u0183\u0181"+ + "\u0001\u0000\u0000\u0000\u0184\u0186\u0003\n\u0005\u0000\u0185\u0187\u0007"+ + 
"\u0004\u0000\u0000\u0186\u0185\u0001\u0000\u0000\u0000\u0186\u0187\u0001"+ + "\u0000\u0000\u0000\u0187\u018a\u0001\u0000\u0000\u0000\u0188\u0189\u0005"+ + ".\u0000\u0000\u0189\u018b\u0007\u0005\u0000\u0000\u018a\u0188\u0001\u0000"+ + "\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001\u0000\u0000"+ + "\u0000\u018c\u018d\u0005\t\u0000\u0000\u018d\u0192\u0003.\u0017\u0000"+ + "\u018e\u018f\u0005\"\u0000\u0000\u018f\u0191\u0003.\u0017\u0000\u0190"+ + "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ + "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + "=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196"+ + "\u0005\u0002\u0000\u0000\u0196\u019b\u0003.\u0017\u0000\u0197\u0198\u0005"+ + "\"\u0000\u0000\u0198\u019a\u0003.\u0017\u0000\u0199\u0197\u0001\u0000"+ + "\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000"+ + "\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c?\u0001\u0000\u0000"+ + "\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e\u019f\u0005\r\u0000\u0000"+ + "\u019f\u01a4\u0003B!\u0000\u01a0\u01a1\u0005\"\u0000\u0000\u01a1\u01a3"+ + "\u0003B!\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a3\u01a6\u0001\u0000"+ + "\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000"+ + "\u0000\u0000\u01a5A\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000"+ + "\u0000\u01a7\u01a8\u0003.\u0017\u0000\u01a8\u01a9\u0005P\u0000\u0000\u01a9"+ + "\u01aa\u0003.\u0017\u0000\u01aaC\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005"+ + "\u0001\u0000\u0000\u01ac\u01ad\u0003\u0012\t\u0000\u01ad\u01af\u0003V"+ + "+\u0000\u01ae\u01b0\u0003J%\u0000\u01af\u01ae\u0001\u0000\u0000\u0000"+ + "\u01af\u01b0\u0001\u0000\u0000\u0000\u01b0E\u0001\u0000\u0000\u0000\u01b1"+ + "\u01b2\u0005\u0007\u0000\u0000\u01b2\u01b3\u0003\u0012\t\u0000\u01b3\u01b4"+ + "\u0003V+\u0000\u01b4G\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005\f\u0000"+ + "\u0000\u01b6\u01b7\u0003,\u0016\u0000\u01b7I\u0001\u0000\u0000\u0000\u01b8"+ + "\u01bd\u0003L&\u0000\u01b9\u01ba\u0005\"\u0000\u0000\u01ba\u01bc\u0003"+ + "L&\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001\u0000\u0000"+ + "\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001\u0000\u0000"+ + "\u0000\u01beK\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000"+ + "\u01c0\u01c1\u00030\u0018\u0000\u01c1\u01c2\u0005!\u0000\u0000\u01c2\u01c3"+ + "\u00034\u001a\u0000\u01c3M\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u0006"+ + "\u0000\u0000\u01c5O\u0001\u0000\u0000\u0000\u01c6\u01c9\u0003R)\u0000"+ + "\u01c7\u01c9\u0003T*\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c8\u01c7"+ + "\u0001\u0000\u0000\u0000\u01c9Q\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007"+ + "\u0000\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001"+ + "\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ + "\u001d\u0000\u0000\u01ceS\u0001\u0000\u0000\u0000\u01cf\u01d1\u0007\u0000"+ + "\u0000\u0000\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u001c"+ + "\u0000\u0000\u01d3U\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005\u001b\u0000"+ + "\u0000\u01d5W\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0007\u0000\u0000"+ + "\u01d7Y\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005\u0005\u0000\u0000\u01d9"+ + "\u01da\u0003\\.\u0000\u01da[\u0001\u0000\u0000\u0000\u01db\u01dc\u0005"+ + "@\u0000\u0000\u01dc\u01dd\u0003\u0002\u0001\u0000\u01dd\u01de\u0005A\u0000"+ + 
"\u0000\u01de]\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u000f\u0000\u0000"+ + "\u01e0\u01e1\u0005`\u0000\u0000\u01e1_\u0001\u0000\u0000\u0000\u01e2\u01e3"+ + "\u0005\u000b\u0000\u0000\u01e3\u01e4\u0005d\u0000\u0000\u01e4a\u0001\u0000"+ + "\u0000\u0000\u01e5\u01e6\u0005\u0003\u0000\u0000\u01e6\u01e9\u0005V\u0000"+ + "\u0000\u01e7\u01e8\u0005T\u0000\u0000\u01e8\u01ea\u0003.\u0017\u0000\u01e9"+ + "\u01e7\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea"+ + "\u01f4\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005U\u0000\u0000\u01ec\u01f1"+ + "\u0003d2\u0000\u01ed\u01ee\u0005\"\u0000\u0000\u01ee\u01f0\u0003d2\u0000"+ + "\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000"+ + "\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f4\u01eb\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ + "\u01f5c\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003.\u0017\u0000\u01f7\u01f8"+ + "\u0005!\u0000\u0000\u01f8\u01fa\u0001\u0000\u0000\u0000\u01f9\u01f6\u0001"+ + "\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001"+ + "\u0000\u0000\u0000\u01fb\u01fc\u0003.\u0017\u0000\u01fce\u0001\u0000\u0000"+ + "\u00001qy\u0088\u0094\u009d\u00a5\u00a9\u00b1\u00b3\u00b8\u00bf\u00c4"+ + "\u00cb\u00d1\u00d9\u00db\u00e5\u00ef\u00f2\u00fe\u0106\u010e\u0112\u0116"+ + "\u011e\u012a\u012e\u0134\u013d\u0145\u015b\u0166\u0171\u0176\u0181\u0186"+ + "\u018a\u0192\u019b\u01a4\u01af\u01bd\u01c8\u01cb\u01d0\u01e9\u01f1\u01f4"+ + "\u01f9"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index ffbcfc57a90ea..6737e782025b2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -845,13 +845,13 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *

    <p>The default implementation does nothing.</p>
    */ - @Override public void enterShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { } + @Override public void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } /** * {@inheritDoc} * *

    <p>The default implementation does nothing.</p>
    */ - @Override public void exitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { } + @Override public void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index ca876efb3e7da..95502053521d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -501,7 +501,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

    <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { return visitChildren(ctx); } + @Override public T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 75656c1df76dc..433eba1a14999 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -754,17 +754,17 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitShowInfo(EsqlBaseParser.ShowInfoContext ctx); /** - * Enter a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Enter a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree */ - void enterShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** - * Exit a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Exit a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree */ - void exitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#enrichCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 0c3cc791f7fe2..323eb46a42fda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -453,12 +453,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitShowInfo(EsqlBaseParser.ShowInfoContext ctx); /** - * Visit a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Visit a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree * @return the visitor result */ - T visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#enrichCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index d58e25391aa26..223d318a64324 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -23,11 +23,11 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -73,7 +73,9 @@ import java.util.function.BiFunction; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.parseTemporalAmout; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIntegral; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.TIME_DURATION; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; @@ -124,11 +126,11 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { Number number; try { - number = StringUtils.parseIntegral(text); + number = stringToIntegral(text); } catch (InvalidArgumentException siae) { // if it's too large, then quietly try to parse as a float instead try { - return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); + return new Literal(source, EsqlDataTypeConverter.stringToDouble(text), DataTypes.DOUBLE); } catch (InvalidArgumentException ignored) {} throw new ParsingException(source, siae.getMessage()); @@ -161,7 +163,9 @@ public Object visitNumericArrayLiteral(EsqlBaseParser.NumericArrayLiteralContext source, mapNumbers( numbers, - (no, dt) -> dt == DataTypes.UNSIGNED_LONG ? no.longValue() : asLongUnsigned(BigInteger.valueOf(no.longValue())) + (no, dt) -> dt == DataTypes.UNSIGNED_LONG + ? 
no.longValue() + : bigIntegerToUnsignedLong(BigInteger.valueOf(no.longValue())) ), DataTypes.UNSIGNED_LONG ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 8f4f942551002..64ce1633e8772 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -64,6 +64,7 @@ import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.plan.logical.Enrich.Mode; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.ql.parser.ParserUtils.visitList; @@ -250,7 +251,7 @@ public PlanFactory visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { @Override public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); - int limit = Integer.parseInt(ctx.INTEGER_LITERAL().getText()); + int limit = stringToInt(ctx.INTEGER_LITERAL().getText()); return input -> new Limit(source, new Literal(source, limit, DataTypes.INTEGER), input); } @@ -315,8 +316,8 @@ public LogicalPlan visitShowInfo(EsqlBaseParser.ShowInfoContext ctx) { } @Override - public LogicalPlan visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { - return new ShowFunctions(source(ctx)); + public LogicalPlan visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { + return new MetaFunctions(source(ctx)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java similarity index 86% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java index 5a4b90c45f23d..34b6fd1a31b13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.plan.logical.show; +package org.elasticsearch.xpack.esql.plan.logical.meta; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; @@ -28,11 +28,11 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; -public class ShowFunctions extends LeafPlan { +public class MetaFunctions extends LeafPlan { private final List attributes; - public ShowFunctions(Source source) { + public MetaFunctions(Source source) { super(source); attributes = new ArrayList<>(); @@ -82,11 +82,16 @@ private Object collect(EsqlFunctionRegistry.FunctionDescription signature, Funct return result; } + List args = signature.args(); List result = signature.args().stream().map(x).collect(Collectors.toList()); - if (result.isEmpty() == false && result.get(0) instanceof String[]) { - List newResult = new ArrayList<>(); - for (Object item : result) { - newResult.add(withPipes((String[]) item)); + boolean withPipes = result.get(0) instanceof String[]; + if (result.isEmpty() == false) { + List newResult = new ArrayList<>(); + for (int i = 0; i < result.size(); i++) { + if (signature.variadic() && args.get(i).optional()) { + continue; + } + newResult.add(withPipes ? withPipes((String[]) result.get(i)) : result.get(i)); } return newResult; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index f5cee225b1b13..a95d846133c45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -11,17 +11,16 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Median; import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.NumericAggregate; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; @@ -42,7 +41,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -54,17 +52,17 @@ public class AggregateMapper { static final List NUMERIC = List.of("Int", "Long", "Double"); static final List SPATIAL = List.of("GeoPoint", "CartesianPoint"); - /** List of all ESQL agg functions. 
*/ + /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ static final List> AGG_FUNCTIONS = List.of( Count.class, CountDistinct.class, Max.class, - Median.class, MedianAbsoluteDeviation.class, Min.class, Percentile.class, SpatialCentroid.class, - Sum.class + Sum.class, + Values.class ); /** Record of agg Class, type, and grouping (or non-grouping). */ @@ -77,7 +75,7 @@ record AggDef(Class aggClazz, String type, String extra, boolean grouping) {} private final HashMap> cache = new HashMap<>(); AggregateMapper() { - this(AGG_FUNCTIONS.stream().filter(Predicate.not(SurrogateExpression.class::isAssignableFrom)).toList()); + this(AGG_FUNCTIONS); } AggregateMapper(List> aggregateFunctionClasses) { @@ -148,6 +146,9 @@ private static Stream, Tuple>> typeAndNames(Class } else if (SpatialAggregateFunction.class.isAssignableFrom(clazz)) { types = SPATIAL; extraConfigs = List.of("SourceValues", "DocValues"); + } else if (Values.class.isAssignableFrom(clazz)) { + // TODO can't we figure this out from the function itself? + types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); } else { assert clazz == CountDistinct.class : "Expected CountDistinct, got: " + clazz; types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 33f8b4a5eddef..0afa6179fd3c8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -7,10 +7,11 @@ package org.elasticsearch.xpack.esql.planner; +import org.apache.lucene.document.ShapeField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -19,7 +20,10 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; +import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -50,7 +54,12 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; +import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.HOUR_MINUTE_SECOND; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; @@ -61,6 +70,7 @@ public final class EsqlExpressionTranslators { public static final List> QUERY_TRANSLATORS = List.of( new EqualsIgnoreCaseTranslator(), new BinaryComparisons(), + new SpatialRelatesTranslator(), new ExpressionTranslators.Ranges(), new ExpressionTranslators.BinaryLogic(), new ExpressionTranslators.IsNulls(), @@ -170,26 +180,26 @@ static Query translate(BinaryComparison bc, TranslatorHandler handler) { if (value instanceof ZonedDateTime || value instanceof OffsetTime) { DateFormatter formatter; if (value instanceof ZonedDateTime) { - formatter = DateFormatter.forPattern("strict_date_optional_time_nanos"); + formatter = DEFAULT_DATE_TIME_FORMATTER; // RangeQueryBuilder accepts an Object as its parameter, but it will call .toString() on the ZonedDateTime instance // which can have a slightly different format depending on the ZoneId used to create the ZonedDateTime // Since RangeQueryBuilder can handle date as String as well, we'll format it as String and provide the format as well. value = formatter.format((ZonedDateTime) value); } else { - formatter = DateFormatter.forPattern("strict_hour_minute_second_fraction"); + formatter = HOUR_MINUTE_SECOND; value = formatter.format((OffsetTime) value); } format = formatter.pattern(); isDateLiteralComparison = true; } else if (attribute.dataType() == IP && value instanceof BytesRef bytesRef) { - value = DocValueFormat.IP.format(bytesRef); + value = ipToString(bytesRef); } else if (attribute.dataType() == VERSION) { // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not // encoded) representation of the version as it'll do the encoding itself. 
if (value instanceof BytesRef bytesRef) { - value = new Version(bytesRef).toString(); + value = versionToString(bytesRef); } else if (value instanceof Version version) { - value = version.toString(); + value = versionToString(version); } } else if (attribute.dataType() == UNSIGNED_LONG && value instanceof Long ul) { value = unsignedLongAsNumber(ul); @@ -348,4 +358,63 @@ public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { return ExpressionTranslators.Scalars.doTranslate(f, handler); } } + + public static class SpatialRelatesTranslator extends ExpressionTranslator { + + @Override + protected Query asQuery(SpatialRelatesFunction bc, TranslatorHandler handler) { + return doTranslate(bc, handler); + } + + public static void checkSpatialRelatesFunction(Expression constantExpression, ShapeField.QueryRelation queryRelation) { + Check.isTrue( + constantExpression.foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [ST_{}]", + constantExpression.sourceLocation().getLineNumber(), + constantExpression.sourceLocation().getColumnNumber(), + Expressions.name(constantExpression), + queryRelation + ); + } + + /** + * We should normally be using the real `wrapFunctionQuery` above, so we get the benefits of `SingleValueQuery`, + * but at the moment `SingleValueQuery` makes use of `SortDocValues` to determine if the results are single or multi-valued, + * and LeafShapeFieldData does not support `SortedBinaryDocValues getBytesValues()`. + * Skipping this code path entirely is a temporary workaround while separate work is being done to simplify `SingleValueQuery` + * to rather rely on a new method on `LeafFieldData`. This is both for the benefit of the spatial queries, as well as an + * improvement overall. 
+ * TODO: Remove this method and call the parent method once the SingleValueQuery improvements have been made + */ + public static Query wrapFunctionQuery(Expression field, Supplier querySupplier) { + return ExpressionTranslator.wrapIfNested(querySupplier.get(), field); + } + + public static Query doTranslate(SpatialRelatesFunction bc, TranslatorHandler handler) { + if (bc.left().foldable()) { + checkSpatialRelatesFunction(bc.left(), bc.queryRelation()); + return wrapFunctionQuery(bc.right(), () -> translate(bc, handler, bc.right(), bc.left())); + } else { + checkSpatialRelatesFunction(bc.right(), bc.queryRelation()); + return wrapFunctionQuery(bc.left(), () -> translate(bc, handler, bc.left(), bc.right())); + } + } + + static Query translate( + SpatialRelatesFunction bc, + TranslatorHandler handler, + Expression spatialExpression, + Expression constantExpression + ) { + TypedAttribute attribute = checkIsPushableAttribute(spatialExpression); + String name = handler.nameOf(attribute); + + try { + Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(constantExpression); + return new SpatialRelatesQuery(bc.source(), name, bc.queryRelation(), shape, attribute.dataType()); + } catch (IllegalArgumentException e) { + throw new QlIllegalArgumentException(e.getMessage(), e); + } + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 34a31ac7e656d..aad80b6c673ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -99,6 +99,7 @@ import static java.util.stream.Collectors.joining; import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; /** * The local execution planner takes a plan (represented as PlanNode tree / digraph) as input and creates the corresponding @@ -366,7 +367,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte int limit; if (topNExec.limit() instanceof Literal literal) { - limit = Integer.parseInt(literal.value().toString()); + limit = stringToInt(literal.value().toString()); } else { throw new EsqlIllegalArgumentException("limit only supported with literal values"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index fd0801d35958d..516c88b5f6526 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -85,8 +85,8 @@ public PhysicalPlan map(LogicalPlan p) 
{ } // Commands - if (p instanceof ShowFunctions showFunctions) { - return new ShowExec(showFunctions.source(), showFunctions.output(), showFunctions.values(functionRegistry)); + if (p instanceof MetaFunctions metaFunctions) { + return new ShowExec(metaFunctions.source(), metaFunctions.output(), metaFunctions.values(functionRegistry)); } if (p instanceof ShowInfo showInfo) { return new ShowExec(showInfo.source(), showInfo.output(), showInfo.values()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 2b7eadb16f444..17f262143f57a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -12,8 +12,26 @@ import org.elasticsearch.features.NodeFeature; import java.util.Map; +import java.util.Set; public class EsqlFeatures implements FeatureSpecification { + /** + * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. + * Added in #106095. + */ + private static final NodeFeature MV_SORT = new NodeFeature("esql.mv_sort"); + + /** + * When we disabled some broken optimizations around {@code nullable}. + * Fixed in #105691. + */ + private static final NodeFeature DISABLE_NULLABLE_OPTS = new NodeFeature("esql.disable_nullable_opts"); + + /** + * Introduction of {@code ST_X} and {@code ST_Y}. Added in #105768. + */ + private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); + /** * When we added the warnings for multivalued fields emitting {@code null} * when they touched multivalued fields. Added in #102417. @@ -42,6 +60,21 @@ public class EsqlFeatures implements FeatureSpecification { // */ // private static final NodeFeature GEO_SHAPE_SUPPORT = new NodeFeature("esql.geo_shape"); + /** + * The introduction of the {@code VALUES} agg. + */ + private static final NodeFeature AGG_VALUES = new NodeFeature("esql.agg_values"); + + /** + * Does ESQL support async queries. + */ + public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); + + @Override + public Set getFeatures() { + return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y); + } + @Override public Map getHistoricalFeatures() { return Map.ofEntries( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java new file mode 100644 index 0000000000000..ca69569546ba3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.querydsl.query; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.document.XYDocValuesField; +import org.apache.lucene.document.XYPointField; +import org.apache.lucene.document.XYShape; +import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.GeoShapeQueryable; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.lucene.spatial.CartesianShapeDocValuesQuery; +import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; + +public class SpatialRelatesQuery extends Query { + private final String field; + private final ShapeField.QueryRelation queryRelation; + private final Geometry shape; + private final DataType dataType; + + public SpatialRelatesQuery(Source source, String field, ShapeField.QueryRelation queryRelation, Geometry shape, DataType dataType) { + super(source); + this.field = field; + this.queryRelation = queryRelation; + this.shape = shape; + this.dataType = dataType; + } + + @Override + public boolean containsNestedField(String path, String field) { + return false; + } + + @Override + public Query addNestedField(String path, String field, String format, boolean hasDocValues) { + return null; + } + + @Override + public void enrichNestedSort(NestedSortBuilder sort) { + + } + + @Override + public QueryBuilder asBuilder() { + return EsqlDataTypes.isSpatialGeo(dataType) ? 
new GeoShapeQueryBuilder() : new CartesianShapeQueryBuilder(); + } + + @Override + protected String innerToString() { + throw new IllegalArgumentException("SpatialRelatesQuery.innerToString() not implemented"); + } + + @Override + public int hashCode() { + return Objects.hash(field, queryRelation, shape, dataType); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + SpatialRelatesQuery other = (SpatialRelatesQuery) obj; + return Objects.equals(field, other.field) + && Objects.equals(queryRelation, other.queryRelation) + && Objects.equals(shape, other.shape) + && Objects.equals(dataType, other.dataType); + } + + public ShapeRelation shapeRelation() { + return switch (queryRelation) { + case INTERSECTS -> ShapeRelation.INTERSECTS; + case DISJOINT -> ShapeRelation.DISJOINT; + case WITHIN -> ShapeRelation.WITHIN; + case CONTAINS -> ShapeRelation.CONTAINS; + }; + } + + /** + * This class is a minimal implementation of the QueryBuilder interface. + * We only need the toQuery method, but ESQL makes extensive use of QueryBuilder and trimming that interface down for ESQL only would + * be a large undertaking. + * Note that this class is only public for testing in PhysicalPlanOptimizerTests. + */ + public abstract class ShapeQueryBuilder implements QueryBuilder { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public org.apache.lucene.search.Query toQuery(SearchExecutionContext context) throws IOException { + final MappedFieldType fieldType = context.getFieldType(field); + if (fieldType == null) { + throw new QueryShardException(context, "failed to find type for field [" + field + "]"); + } + return buildShapeQuery(context, fieldType); + } + + abstract org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType); + + @Override + public QueryBuilder queryName(String queryName) { + throw new UnsupportedOperationException("Unimplemented: String"); + } + + @Override + public String queryName() { + throw new UnsupportedOperationException("Unimplemented: queryName"); + } + + @Override + public float boost() { + return 0; + } + + @Override + public QueryBuilder boost(float boost) { + throw new UnsupportedOperationException("Unimplemented: float"); + } + + @Override + public String getName() { + throw new UnsupportedOperationException("Unimplemented: getName"); + } + + /** Public for testing */ + public String fieldName() { + return field; + } + + /** Public for testing */ + public ShapeRelation relation() { + return shapeRelation(); + } + + /** Public for testing */ + public Geometry shape() { + return shape; + } + } + + private class GeoShapeQueryBuilder extends ShapeQueryBuilder { + public final String NAME = "geo_shape"; + + @Override + public String getWriteableName() { + return "GeoShapeQueryBuilder"; + } + + @Override + org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType) { + if ((fieldType instanceof GeoShapeQueryable) == false) { + 
throw new QueryShardException( + context, + "Field [" + field + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + ); + } + final GeoShapeQueryable ft = (GeoShapeQueryable) fieldType; + return new ConstantScoreQuery(ft.geoShapeQuery(context, fieldType.name(), shapeRelation(), shape)); + } + } + + private class CartesianShapeQueryBuilder extends ShapeQueryBuilder { + @Override + public String getWriteableName() { + return "CartesianShapeQueryBuilder"; + } + + @Override + org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType) { + org.apache.lucene.search.Query innerQuery = dataType == CARTESIAN_POINT + ? pointShapeQuery(shape, fieldType.name(), queryRelation, context) + : shapeShapeQuery(shape, fieldType.name(), queryRelation, context); + return new ConstantScoreQuery(innerQuery); + } + + /** + * This code is based on the ShapeQueryPointProcessor.shapeQuery() method + */ + private static org.apache.lucene.search.Query pointShapeQuery( + Geometry geometry, + String fieldName, + ShapeField.QueryRelation relation, + SearchExecutionContext context + ) { + final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); + // only the intersects relation is supported for indexed cartesian point types + if (relation != ShapeField.QueryRelation.INTERSECTS) { + throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + } + final Consumer checker = t -> { + if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { + throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + t + " queries"); + } + }; + final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, checker); + org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + if (hasDocValues) { + final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); + query = new IndexOrDocValuesQuery(query, queryDocValues); + } + return query; + } + + /** + * This code is based on the ShapeQueryProcessor.shapeQuery() method + */ + private static org.apache.lucene.search.Query shapeShapeQuery( + Geometry geometry, + String fieldName, + ShapeField.QueryRelation relation, + SearchExecutionContext context + ) { + final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); + // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); + if (relation == ShapeField.QueryRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { + throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + } + if (geometry == null || geometry.isEmpty()) { + return new MatchNoDocsQuery(); + } + final XYGeometry[] luceneGeometries; + try { + luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + } catch (IllegalArgumentException e) { + throw new QueryShardException(context, "Exception creating query on Field [" + fieldName + "] " + e.getMessage(), e); + } + org.apache.lucene.search.Query query = XYShape.newGeometryQuery(fieldName, relation, luceneGeometries); + if (hasDocValues) { + final org.apache.lucene.search.Query queryDocValues = new CartesianShapeDocValuesQuery( + fieldName, + relation, + luceneGeometries + ); + query = new 
IndexOrDocValuesQuery(query, queryDocValues); + } + return query; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index d30aff3139495..dd1d9cffeeff1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; @@ -43,12 +43,13 @@ public enum FeatureMetric { WHERE(Filter.class::isInstance), ENRICH(Enrich.class::isInstance), MV_EXPAND(MvExpand.class::isInstance), - SHOW(plan -> plan instanceof ShowInfo || plan instanceof ShowFunctions), + SHOW(ShowInfo.class::isInstance), ROW(Row.class::isInstance), FROM(EsRelation.class::isInstance), DROP(Drop.class::isInstance), KEEP(Keep.class::isInstance), - RENAME(Rename.class::isInstance); + RENAME(Rename.class::isInstance), + META(MetaFunctions.class::isInstance); private Predicate planCheck; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index c813308ea0443..679781a40c869 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -195,7 +195,8 @@ public boolean isSingleValue(String field) { if (exists(field) == false) { stat.singleValue = true; } else { - var sv = new boolean[] { true }; + // fields are MV per default + var sv = new boolean[] { false }; for (SearchContext context : contexts) { var sec = context.getSearchExecutionContext(); MappedFieldType mappedType = sec.isFieldMapped(field) ? 
null : sec.getFieldType(field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index eba80ff238a45..82e7fc2e9fc88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -7,8 +7,12 @@ package org.elasticsearch.xpack.esql.type; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; @@ -16,21 +20,41 @@ import org.elasticsearch.xpack.ql.type.Converter; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypeConverter; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; +import java.math.BigInteger; import java.time.Duration; +import java.time.Instant; import java.time.Period; +import java.time.ZoneId; +import java.time.temporal.ChronoField; import java.time.temporal.TemporalAmount; +import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; import static org.elasticsearch.xpack.ql.type.DataTypes.isPrimitive; import static org.elasticsearch.xpack.ql.type.DataTypes.isString; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asUnsignedLong; +import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.UNSPECIFIED; public class EsqlDataTypeConverter { + public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time"); + + public static final DateFormatter HOUR_MINUTE_SECOND = DateFormatter.forPattern("strict_hour_minute_second_fraction"); + /** * Returns true if the from type can be converted to the to type, false - otherwise */ @@ -44,6 +68,7 @@ public static boolean canConvert(DataType from, DataType to) { } public static Converter converterFor(DataType from, DataType to) { + // TODO move EXPRESSION_TO_LONG here if there is no regression Converter converter = DataTypeConverter.converterFor(from, to); if (converter != null) { return converter; @@ -148,10 +173,183 @@ public static TemporalAmount parseTemporalAmout(Number value, String qualifier, }; } + /** + * The following conversions are used by DateExtract. 
+ */ + private static ChronoField stringToChrono(Object field) { + ChronoField chronoField = null; + try { + BytesRef br = BytesRefs.toBytesRef(field); + chronoField = ChronoField.valueOf(br.utf8ToString().toUpperCase(Locale.ROOT)); + } catch (Exception e) { + return null; + } + return chronoField; + } + + public static long chronoToLong(long dateTime, BytesRef chronoField, ZoneId zone) { + ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); + return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chrono); + } + + public static long chronoToLong(long dateTime, ChronoField chronoField, ZoneId zone) { + return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chronoField); + } + + /** + * The following conversions are between String and other data types. + */ + public static BytesRef stringToIP(BytesRef field) { + return StringUtils.parseIP(field.utf8ToString()); + } + + public static BytesRef stringToIP(String field) { + return StringUtils.parseIP(field); + } + + public static String ipToString(BytesRef field) { + return DocValueFormat.IP.format(field); + } + + public static BytesRef stringToVersion(BytesRef field) { + return new Version(field.utf8ToString()).toBytesRef(); + } + + public static String versionToString(BytesRef field) { + return new Version(field).toString(); + } + + public static String versionToString(Version field) { + return field.toString(); + } + + public static String spatialToString(BytesRef field) { + return UNSPECIFIED.wkbToWkt(field); + } + + public static BytesRef stringToSpatial(String field) { + return UNSPECIFIED.wktToWkb(field); + } + + public static long dateTimeToLong(String dateTime) { + return DEFAULT_DATE_TIME_FORMATTER.parseMillis(dateTime); + } + + public static long dateTimeToLong(String dateTime, DateFormatter formatter) { + return formatter == null ? dateTimeToLong(dateTime) : formatter.parseMillis(dateTime); + } + + public static String dateTimeToString(long dateTime) { + return DEFAULT_DATE_TIME_FORMATTER.formatMillis(dateTime); + } + + public static String dateTimeToString(long dateTime, DateFormatter formatter) { + return formatter == null ? 
dateTimeToString(dateTime) : formatter.formatMillis(dateTime); + } + + public static BytesRef numericBooleanToString(Object field) { + return new BytesRef(String.valueOf(field)); + } + + public static boolean stringToBoolean(String field) { + return Boolean.parseBoolean(field); + } + + public static int stringToInt(String field) { + try { + return Integer.parseInt(field); + } catch (NumberFormatException nfe) { + try { + return safeToInt(stringToDouble(field)); + } catch (Exception e) { + throw new InvalidArgumentException(nfe, "Cannot parse number [{}]", field); + } + } + } + + public static long stringToLong(String field) { + try { + return StringUtils.parseLong(field); + } catch (InvalidArgumentException iae) { + try { + return safeDoubleToLong(stringToDouble(field)); + } catch (Exception e) { + throw new InvalidArgumentException(iae, "Cannot parse number [{}]", field); + } + } + } + + public static double stringToDouble(String field) { + return StringUtils.parseDouble(field); + } + + public static BytesRef unsignedLongToString(long number) { + return new BytesRef(unsignedLongAsNumber(number).toString()); + } + + public static long stringToUnsignedLong(String field) { + return asLongUnsigned(safeToUnsignedLong(field)); + } + + public static Number stringToIntegral(String field) { + return StringUtils.parseIntegral(field); + } + + /** + * The following conversion are between unsignedLong and other numeric data types. + */ + public static double unsignedLongToDouble(long number) { + return NumericUtils.unsignedLongAsNumber(number).doubleValue(); + } + + public static long doubleToUnsignedLong(double number) { + return NumericUtils.asLongUnsigned(safeToUnsignedLong(number)); + } + + public static int unsignedLongToInt(long number) { + Number n = NumericUtils.unsignedLongAsNumber(number); + int i = n.intValue(); + if (i != n.longValue()) { + throw new InvalidArgumentException("[{}] out of [integer] range", n); + } + return i; + } + + public static long intToUnsignedLong(int number) { + return longToUnsignedLong(number, false); + } + + public static long unsignedLongToLong(long number) { + return DataTypeConverter.safeToLong(unsignedLongAsNumber(number)); + } + + public static long longToUnsignedLong(long number, boolean allowNegative) { + return allowNegative == false ? NumericUtils.asLongUnsigned(safeToUnsignedLong(number)) : NumericUtils.asLongUnsigned(number); + } + + public static long bigIntegerToUnsignedLong(BigInteger field) { + BigInteger unsignedLong = asUnsignedLong(field); + return NumericUtils.asLongUnsigned(unsignedLong); + } + + public static BigInteger unsignedLongToBigInteger(long number) { + return NumericUtils.unsignedLongAsBigInteger(number); + } + + public static boolean unsignedLongToBoolean(long number) { + Number n = NumericUtils.unsignedLongAsNumber(number); + return n instanceof BigInteger || n.longValue() != 0; + } + + public static long booleanToUnsignedLong(boolean number) { + return number ? 
ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; + } + public enum EsqlConverter implements Converter { STRING_TO_DATE_PERIOD(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.DATE_PERIOD)), - STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.TIME_DURATION)); + STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.TIME_DURATION)), + STRING_TO_CHRONO_FIELD(EsqlDataTypeConverter::stringToChrono); private static final String NAME = "esql-converter"; private final Function converter; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 0813069330879..8edee89832255 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -45,10 +45,10 @@ public final class EsqlDataTypes { public static final DataType DATE_PERIOD = new DataType("DATE_PERIOD", null, 3 * Integer.BYTES, false, false, false); public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); - public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false); - public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, false); - public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, false); - public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, false); + public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, true); + public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, true); + public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, true); + public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, true); private static final Collection TYPES = Stream.of( BOOLEAN, @@ -175,6 +175,10 @@ public static boolean isSpatial(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; } + public static boolean isSpatialGeo(DataType t) { + return t == GEO_POINT || t == GEO_SHAPE; + } + public static boolean isSpatialPoint(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 56b6dcdd1ad8b..662ae1a208ed0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -319,7 +319,11 @@ private static CsvTestsDataLoader.TestsDataset testsDataset(LogicalPlan parsed) var preAnalysis = new PreAnalyzer().preAnalyze(parsed); var indices = preAnalysis.indices; if (indices.size() == 0) { - return CSV_DATASET_MAP.values().iterator().next(); // default dataset for `row` source command + /* + * If the data set doesn't matter we'll just grab one we know works. + * Employees is fine. 
+ */ + return CSV_DATASET_MAP.get("employees"); } else if (preAnalysis.indices.size() > 1) { throw new IllegalArgumentException("unexpected index resolution to multiple entries [" + preAnalysis.indices.size() + "]"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 975b31b967fe0..543e7c93526d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1775,6 +1775,32 @@ public void testUnsupportedTypesInStats() { ); } + public void testInOnText() { + assertProjectionWithMapping(""" + from a_index + | eval text in (\"a\", \"b\", \"c\") + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text in (\"a\", \"b\", \"c\", text) + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text not in (\"a\", \"b\", \"c\") + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text not in (\"a\", \"b\", \"c\", text) + | keep text + """, "mapping-multi-field-variation.json", "text"); + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 40526d35031d4..78c1c57e07782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesReference; @@ -181,11 +182,20 @@ public static Expression deepCopyOfField(String name, DataType type) { */ protected abstract Expression build(Source source, List args); - protected Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { + /** + * Build an {@link Expression} where all inputs are field references, + * except those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. + */ + protected final Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { return build(testCase.getSource(), testCase.getDataAsFields()); } - protected Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { + /** + * Build an {@link Expression} where all inputs are anonymous functions + * that make a copy of the values from a field except + * those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. 
+ */ + protected final Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { return build(testCase.getSource(), testCase.getDataAsDeepCopiedFields()); } @@ -255,7 +265,7 @@ private void testEvaluate(boolean readFloating) { } assertFalse("expected resolved", expression.typeResolved().unresolved()); expression = new FoldNull().rule(expression); - assertThat(expression.dataType(), equalTo(testCase.expectedType)); + assertThat(expression.dataType(), equalTo(testCase.expectedType())); logger.info("Result type: " + expression.dataType()); Object result; @@ -278,7 +288,7 @@ private void testEvaluate(boolean readFloating) { private Object toJavaObjectUnsignedLongAware(Block block, int position) { Object result; result = toJavaObject(block, position); - if (result != null && testCase.expectedType == DataTypes.UNSIGNED_LONG) { + if (result != null && testCase.expectedType() == DataTypes.UNSIGNED_LONG) { assertThat(result, instanceOf(Long.class)); result = NumericUtils.unsignedLongAsBigInteger((Long) result); } @@ -524,7 +534,7 @@ public final void testEvaluatorToString() { assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var factory = evaluator(buildFieldExpression(testCase)); try (ExpressionEvaluator ev = factory.get(driverContext())) { - assertThat(ev.toString(), equalTo(testCase.evaluatorToString)); + assertThat(ev.toString(), testCase.evaluatorToString()); } } @@ -532,7 +542,7 @@ public final void testFactoryToString() { assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var factory = evaluator(buildFieldExpression(testCase)); - assertThat(factory.toString(), equalTo(testCase.evaluatorToString)); + assertThat(factory.toString(), testCase.evaluatorToString()); } public final void testFold() { @@ -544,12 +554,12 @@ public final void testFold() { } assertFalse(expression.typeResolved().unresolved()); Expression nullOptimized = new FoldNull().rule(expression); - assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType)); + assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType())); assertTrue(nullOptimized.foldable()); if (testCase.foldingExceptionClass() == null) { Object result = nullOptimized.fold(); // Decode unsigned longs into BigIntegers - if (testCase.expectedType == DataTypes.UNSIGNED_LONG && result != null) { + if (testCase.expectedType() == DataTypes.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); } assertThat(result, testCase.getMatcher()); @@ -670,11 +680,13 @@ protected static List anyNullIsNull(boolean entirelyNullPreser }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString, - oc.expectedType, + oc.evaluatorToString(), + oc.expectedType(), nullValue(), null, - oc.getExpectedTypeError() + oc.getExpectedTypeError(), + null, + null ); })); @@ -691,11 +703,13 @@ protected static List anyNullIsNull(boolean entirelyNullPreser .toList(); return new TestCaseSupplier.TestCase( data, - "LiteralsEvaluator[lit=null]", - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType, + equalTo("LiteralsEvaluator[lit=null]"), + entirelyNullPreservesType == false && oc.getData().size() == 1 ? 
DataTypes.NULL : oc.expectedType(), nullValue(), null, - oc.getExpectedTypeError() + oc.getExpectedTypeError(), + null, + null ); })); } @@ -882,6 +896,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List * After each test method we add the signature it operated on via * {@link #trackSignature}. Once the test class is done we render - * all the unique signatures to a temp file with {@link #renderTypesTable}. + * all the unique signatures to a temp file with {@link #renderTypes}. * We use a temp file because that's all we're allowed to write to. * Gradle will move the files into the docs after this is done. *

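The javadoc above describes the docs-generation flow this change extends: each test method records the signature it exercised, and an @AfterClass hook renders the collected signatures (now alongside parameters, description, examples, and a full layout page) into asciidoc files under a temp directory that Gradle later moves into the docs. The following is a minimal, self-contained sketch of that collect-then-render pattern, not the real AbstractFunctionTestCase; the class name, the SIGNATURES map, and the output file name are illustrative only.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Sketch only: keep one row per unique argument-type combination while the
// tests run, then write an asciidoc table once the test class is finished.
public class SignatureDocsSketch {
    private static final Map<List<String>, String> SIGNATURES = new LinkedHashMap<>();

    // called after each test method, mirroring trackSignature()
    static void trackSignature(List<String> argTypes, String returnType) {
        SIGNATURES.putIfAbsent(argTypes, returnType);
    }

    // would run from an @AfterClass hook, mirroring renderTypes()
    static void renderTypes(Path tempDir, List<String> argNames) throws IOException {
        StringBuilder out = new StringBuilder();
        out.append("[%header.monospaced.styled,format=dsv,separator=|]\n|===\n");
        out.append(String.join(" | ", argNames)).append(" | result\n");
        for (Map.Entry<List<String>, String> row : SIGNATURES.entrySet()) {
            out.append(String.join(" | ", row.getKey())).append(" | ").append(row.getValue()).append('\n');
        }
        out.append("|===\n");
        Files.writeString(tempDir.resolve("types.asciidoc"), out.toString());
    }
}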
    @@ -1067,32 +1082,41 @@ public void trackSignature() { if (testCase.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { return; } - signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType); + signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType()); } @AfterClass - public static void renderTypesTable() throws IOException { + public static void renderDocs() throws IOException { if (System.getProperty("generateDocs") == null) { return; } String name = functionName(); if (binaryOperator(name) != null) { - renderTypesTable(List.of("lhs", "rhs")); + renderTypes(List.of("lhs", "rhs")); return; } if (unaryOperator(name) != null) { - renderTypesTable(List.of("v")); + renderTypes(List.of("v")); return; } FunctionDefinition definition = definition(name); if (definition != null) { - renderTypesTable(EsqlFunctionRegistry.description(definition).argNames()); + EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); + renderTypes(description.argNames()); + renderParametersList(description.argNames(), description.argDescriptions()); + FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); + renderDescription(description.description(), info.note()); + boolean hasExamples = renderExamples(info); + renderFullLayout(name, hasExamples); return; } LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); } - private static void renderTypesTable(List argNames) throws IOException { + private static final String DOCS_WARNING = + "// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.\n\n"; + + private static void renderTypes(List argNames) throws IOException { StringBuilder header = new StringBuilder(); for (String arg : argNames) { header.append(arg).append(" | "); @@ -1113,7 +1137,9 @@ private static void renderTypesTable(List argNames) throws IOException { } Collections.sort(table); - String rendered = """ + String rendered = DOCS_WARNING + """ + *Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== """ + header + "\n" + table.stream().collect(Collectors.joining("\n")) + "\n|===\n"; @@ -1121,6 +1147,81 @@ private static void renderTypesTable(List argNames) throws IOException { writeToTempDir("types", rendered, "asciidoc"); } + private static void renderParametersList(List argNames, List argDescriptions) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append("*Parameters*\n"); + for (int a = 0; a < argNames.size(); a++) { + builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); + } + String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", functionName(), rendered); + writeToTempDir("parameters", rendered, "asciidoc"); + } + + private static void renderDescription(String description, String note) throws IOException { + String rendered = DOCS_WARNING + """ + *Description* + + """ + description + "\n"; + if (Strings.isNullOrEmpty(note) == false) { + rendered += "\nNOTE: " + note + "\n"; + } + LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); + writeToTempDir("description", rendered, "asciidoc"); + } + + private static boolean renderExamples(FunctionInfo info) throws 
IOException { + if (info == null || info.examples().length == 0) { + return false; + } + StringBuilder builder = new StringBuilder(); + builder.append(DOCS_WARNING); + if (info.examples().length == 1) { + builder.append("*Example*\n\n"); + } else { + builder.append("*Examples*\n\n"); + } + for (Example example : info.examples()) { + builder.append(""" + [source.merge.styled,esql] + ---- + include::{esql-specs}/$FILE$.csv-spec[tag=$TAG$] + ---- + [%header.monospaced.styled,format=dsv,separator=|] + |=== + include::{esql-specs}/$FILE$.csv-spec[tag=$TAG$-result] + |=== + """.replace("$FILE$", example.file()).replace("$TAG$", example.tag())); + } + builder.append('\n'); + String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", functionName(), rendered); + writeToTempDir("examples", rendered, "asciidoc"); + return true; + } + + private static void renderFullLayout(String name, boolean hasExamples) throws IOException { + String rendered = DOCS_WARNING + """ + [discrete] + [[esql-$NAME$]] + === `$UPPER_NAME$` + + *Syntax* + + [.text-center] + image::esql/functions/signature/$NAME$.svg[Embedded,opts=inline] + + include::../parameters/$NAME$.asciidoc[] + include::../description/$NAME$.asciidoc[] + include::../types/$NAME$.asciidoc[] + """.replace("$NAME$", name).replace("$UPPER_NAME$", name.toUpperCase(Locale.ROOT)); + if (hasExamples) { + rendered += "include::../examples/" + name + ".asciidoc[]\n"; + } + LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", functionName(), rendered); + writeToTempDir("layout", rendered, "asciidoc"); + } + private static String functionName() { Class testClass = getTestClass(); if (testClass.isAnnotationPresent(FunctionName.class)) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 38389369fe6b3..2bae1546cd02f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -54,7 +54,7 @@ public record TestCaseSupplier(String name, List types, Supplier { - private static Logger logger = LogManager.getLogger(TestCaseSupplier.class); + private static final Logger logger = LogManager.getLogger(TestCaseSupplier.class); /** * Build a test case without types. 
* @@ -214,6 +214,16 @@ private static void casesCrossProduct( } } + public static TestCaseSupplier testCaseSupplier( + TypedDataSupplier lhsSupplier, + TypedDataSupplier rhsSupplier, + BiFunction evaluatorToString, + DataType expectedType, + BinaryOperator expectedValue + ) { + return testCaseSupplier(lhsSupplier, rhsSupplier, evaluatorToString, expectedType, expectedValue, List.of()); + } + private static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, @@ -938,31 +948,53 @@ public static List timeDurationCases() { ); } - private static List geoPointCases() { - return List.of(new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT)); + public static List geoPointCases() { + return geoPointCases(ESTestCase::randomBoolean); } - private static List cartesianPointCases() { + public static List cartesianPointCases() { + return cartesianPointCases(ESTestCase::randomBoolean); + } + + public static List geoShapeCases() { + return geoShapeCases(ESTestCase::randomBoolean); + } + + public static List cartesianShapeCases() { + return cartesianShapeCases(ESTestCase::randomBoolean); + } + + public static List geoPointCases(Supplier hasAlt) { return List.of( - new TypedDataSupplier("", () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), EsqlDataTypes.CARTESIAN_POINT) + new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint(hasAlt.get())), EsqlDataTypes.GEO_POINT) + ); + } + + public static List cartesianPointCases(Supplier hasAlt) { + return List.of( + new TypedDataSupplier( + "", + () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(hasAlt.get())), + EsqlDataTypes.CARTESIAN_POINT + ) ); } - private static List geoShapeCases() { + public static List geoShapeCases(Supplier hasAlt) { return List.of( new TypedDataSupplier( "", - () -> GEO.asWkb(GeometryTestUtils.randomGeometry(ESTestCase.randomBoolean())), + () -> GEO.asWkb(GeometryTestUtils.randomGeometryWithoutCircle(0, hasAlt.get())), EsqlDataTypes.GEO_SHAPE ) ); } - private static List cartesianShapeCases() { + public static List cartesianShapeCases(Supplier hasAlt) { return List.of( new TypedDataSupplier( "", - () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(ESTestCase.randomBoolean())), + () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(hasAlt.get())), EsqlDataTypes.CARTESIAN_SHAPE ) ); @@ -1098,51 +1130,57 @@ public static class TestCase { /** * The {@link Source} this test case should be run with */ - private Source source; + private final Source source; /** * The parameter values and types to pass into the function for this test run */ - private List data; + private final List data; /** * The expected toString output for the evaluator this function invocation should generate */ - String evaluatorToString; + private final Matcher evaluatorToString; /** * The expected output type for the case being tested */ - DataType expectedType; + private final DataType expectedType; /** * A matcher to validate the output of the function run on the given input data */ - private Matcher matcher; + private final Matcher matcher; /** * Warnings this test is expected to produce */ - private String[] expectedWarnings; - - private Class foldingExceptionClass; - private String foldingExceptionMessage; + private final String[] expectedWarnings; private final String expectedTypeError; private final boolean allTypesAreRepresentable; + private final Class foldingExceptionClass; + private final String foldingExceptionMessage; + public TestCase(List data, String 
evaluatorToString, DataType expectedType, Matcher matcher) { - this(data, evaluatorToString, expectedType, matcher, null, null); + this(data, equalTo(evaluatorToString), expectedType, matcher); + } + + public TestCase(List data, Matcher evaluatorToString, DataType expectedType, Matcher matcher) { + this(data, evaluatorToString, expectedType, matcher, null, null, null, null); } public static TestCase typeError(List data, String expectedTypeError) { - return new TestCase(data, null, null, null, null, expectedTypeError); + return new TestCase(data, null, null, null, null, expectedTypeError, null, null); } TestCase( List data, - String evaluatorToString, + Matcher evaluatorToString, DataType expectedType, Matcher matcher, String[] expectedWarnings, - String expectedTypeError + String expectedTypeError, + Class foldingExceptionClass, + String foldingExceptionMessage ) { this.source = Source.EMPTY; this.data = data; @@ -1152,6 +1190,8 @@ public static TestCase typeError(List data, String expectedTypeError) this.expectedWarnings = expectedWarnings; this.expectedTypeError = expectedTypeError; this.allTypesAreRepresentable = data.stream().allMatch(d -> EsqlDataTypes.isRepresentable(d.type)); + this.foldingExceptionClass = foldingExceptionClass; + this.foldingExceptionMessage = foldingExceptionMessage; } public Source getSource() { @@ -1163,15 +1203,15 @@ public List getData() { } public List getDataAsFields() { - return data.stream().map(t -> AbstractFunctionTestCase.field(t.name(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asField).collect(Collectors.toList()); } public List getDataAsDeepCopiedFields() { - return data.stream().map(t -> AbstractFunctionTestCase.deepCopyOfField(t.name(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asDeepCopyOfField).collect(Collectors.toList()); } public List getDataAsLiterals() { - return data.stream().map(t -> new Literal(Source.synthetic(t.name()), t.data(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asLiteral).collect(Collectors.toList()); } public List getDataValues() { @@ -1210,13 +1250,28 @@ public TestCase withWarning(String warning) { } else { newWarnings = new String[] { warning }; } - return new TestCase(data, evaluatorToString, expectedType, matcher, newWarnings, expectedTypeError); + return new TestCase( + data, + evaluatorToString, + expectedType, + matcher, + newWarnings, + expectedTypeError, + foldingExceptionClass, + foldingExceptionMessage + ); + } + + public TestCase withFoldingException(Class clazz, String message) { + return new TestCase(data, evaluatorToString, expectedType, matcher, expectedWarnings, expectedTypeError, clazz, message); + } + + public DataType expectedType() { + return expectedType; } - public TestCase withFoldingException(Class clazz, String message) { - foldingExceptionClass = clazz; - foldingExceptionMessage = message; - return this; + public Matcher evaluatorToString() { + return evaluatorToString; } } @@ -1233,18 +1288,55 @@ public TypedData get() { /** * Holds a data value and the intended parse type of that value - * @param data - value to test against - * @param type - type of the value, for building expressions - * @param name - a name for the value, used for generating test case names */ - public record TypedData(Object data, DataType type, String name) { - + public static class TypedData { public static final TypedData NULL = new TypedData(null, DataTypes.NULL, ""); + private final Object data; + private final 
DataType type; + private final String name; + private final boolean forceLiteral; + + /** + * @param data value to test against + * @param type type of the value, for building expressions + * @param name a name for the value, used for generating test case names + * @param forceLiteral should this data always be converted to a literal and never to a field reference? + */ + private TypedData(Object data, DataType type, String name, boolean forceLiteral) { + this.data = data; + this.type = type; + this.name = name; + this.forceLiteral = forceLiteral; + } + + /** + * @param data value to test against + * @param type type of the value, for building expressions + * @param name a name for the value, used for generating test case names + */ + public TypedData(Object data, DataType type, String name) { + this(data, type, name, false); + } + + /** + * Build a value, guessing the type via reflection. + * @param data value to test against + * @param name a name for the value, used for generating test case names + */ public TypedData(Object data, String name) { this(data, EsqlDataTypes.fromJava(data), name); } + /** + * Return a {@link TypedData} that always returns a {@link Literal} from + * {@link #asField} and {@link #asDeepCopyOfField}. Use this for things that + * must be constants. + */ + public TypedData forceLiteral() { + return new TypedData(data, type, name, true); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { @@ -1252,5 +1344,53 @@ public String toString() { } return type.toString() + "(" + (data == null ? "null" : data.toString()) + ")"; } + + /** + * Convert this into reference to a field. + */ + public Expression asField() { + if (forceLiteral) { + return asLiteral(); + } + return AbstractFunctionTestCase.field(name, type); + } + + /** + * Convert this into an anonymous function that performs a copy of the values loaded from a field. + */ + public Expression asDeepCopyOfField() { + if (forceLiteral) { + return asLiteral(); + } + return AbstractFunctionTestCase.deepCopyOfField(name, type); + } + + /** + * Convert this into a {@link Literal}. + */ + public Literal asLiteral() { + return new Literal(Source.synthetic(name), data, type); + } + + /** + * Value to test against. + */ + public Object data() { + return data; + } + + /** + * Type of the value. For building {@link Expression}s. + */ + public DataType type() { + return type; + } + + /** + * A name for the value. Used to generate test names. 
+ */ + public String name() { + return name; + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java index dcdfd49b8029c..2b75010ef66a1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java @@ -177,6 +177,9 @@ private String expectedTypeName(Set validTypes) { if (withoutNull.equals(List.of(DataTypes.DATETIME))) { return "datetime"; } + if (withoutNull.equals(List.of(DataTypes.IP))) { + return "ip"; + } List negations = Stream.concat(Stream.of(numerics()), Stream.of(EsqlDataTypes.DATE_PERIOD, EsqlDataTypes.TIME_DURATION)) .sorted(Comparator.comparing(DataType::name)) .toList(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 808249a01969a..93a0d0b5190f5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -25,6 +24,7 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; public class ToDatetimeTests extends AbstractFunctionTestCase { public ToDatetimeTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -103,7 +103,7 @@ public static Iterable parameters() { "Line -1:-1: java.lang.IllegalArgumentException: " + (bytesRef.utf8ToString().isEmpty() ? "cannot parse empty datetime" - : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']")) + : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [strict_date_optional_time]")) ) ); TestCaseSupplier.unary( @@ -113,12 +113,12 @@ public static Iterable parameters() { new TestCaseSupplier.TypedDataSupplier( "", // millis past "0001-01-01T00:00:00.000Z" to match the default formatter - () -> new BytesRef(randomDateString(-62135596800000L, Long.MAX_VALUE)), + () -> new BytesRef(randomDateString(-62135596800000L, 253402300799999L)), DataTypes.KEYWORD ) ), DataTypes.DATETIME, - bytesRef -> DateParse.DEFAULT_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> DEFAULT_DATE_TIME_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), emptyList() ); TestCaseSupplier.unary( @@ -138,7 +138,27 @@ public static Iterable parameters() { "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + ((BytesRef) bytesRef).utf8ToString() - + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']" + + "] with format [strict_date_optional_time]" + ) + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis before "0001-01-01T00:00:00.000Z" + () -> new BytesRef(randomDateString(253402300800000L, Long.MAX_VALUE)), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + + ((BytesRef) bytesRef).utf8ToString() + + "] with format [strict_date_optional_time]" ) ); @@ -146,12 +166,7 @@ public static Iterable parameters() { } private static String randomDateString(long from, long to) { - String result = Instant.ofEpochMilli(randomLongBetween(from, to)).toString(); - if (result.matches(".*:..Z")) { - // it's a zero millisecond date string, Instant.toString() will strip the milliseconds (and the parsing will fail) - return result.replace("Z", ".000Z"); - } - return result; + return Instant.ofEpochMilli(randomLongBetween(from, to)).toString(); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index 0309bcce85581..22a00bb3684a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -55,7 +57,10 @@ public static Iterable parameters() { ); // random strings that don't look like a double TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.DOUBLE, bytesRef -> null, bytesRef -> { - var exception = expectThrows(NumberFormatException.class, () -> Double.parseDouble(bytesRef.utf8ToString())); + var exception = expectThrows( + InvalidArgumentException.class, + () -> EsqlDataTypeConverter.stringToDouble(bytesRef.utf8ToString()) + ); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: " + exception diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index 4402c6d8529b4..3a6cb86b7a3c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -71,7 +71,7 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) ); // from doubles within Integer's range @@ -228,7 +228,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); // strings of random doubles outside Integer's range, positive @@ -249,7 +251,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index 030c219b75e2f..031ce6193bcc4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -50,7 +50,7 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) ); // from doubles within long's range @@ -179,7 +179,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); // strings of random doubles outside integer's range, positive @@ -200,7 +202,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 3a6a5d8eabae3..1e2c24062b07a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -53,6 +53,18 @@ public static Iterable parameters() { equalTo(2023L) ) ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.TEXT, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + equalTo(2023L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.DATETIME), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java new file mode 100644 index 0000000000000..3fa28c566649e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class DateFormatTests extends AbstractScalarFunctionTestCase { + public DateFormatTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.KEYWORD, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.TEXT, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ) + ) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new DateFormat(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); + } + + @Override + protected List argSpec() { + return List.of(required(strings()), required(DataTypes.DATETIME)); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.KEYWORD; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 540d1aa34474b..c7a1a945e079e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -62,6 +62,18 @@ public static Iterable parameters() { equalTo(1683244800000L) ) ), + new TestCaseSupplier( + "With Both Text", + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.TEXT, "second"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "first") + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + equalTo(1683244800000L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, 
DataTypes.KEYWORD), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java new file mode 100644 index 0000000000000..fbeb824697178 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.ip; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class CIDRMatchTests extends AbstractScalarFunctionTestCase { + public CIDRMatchTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + + var suppliers = List.of( + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], 
cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new CIDRMatch(source, args.get(0), List.of(args.get(1))); + } + + @Override + protected List argSpec() { + return List.of(required(DataTypes.IP), required(strings())); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.BOOLEAN; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 8cbeca67d0abd..3c1bf69a78716 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -15,13 +15,15 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.NumericUtils; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; + public class Log10Tests extends AbstractFunctionTestCase { public Log10Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -54,7 +56,7 @@ public static Iterable parameters() { suppliers, "Log10UnsignedLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, - ul -> Math.log10(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + ul -> Math.log10(ul == null ? null : unsignedLongToDouble(bigIntegerToUnsignedLong(ul))), BigInteger.ONE, UNSIGNED_LONG_MAX, List.of() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 55a479a3d2b2c..29e75bb3f0225 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -22,6 +22,8 @@ import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; + public class SqrtTests extends AbstractFunctionTestCase { public SqrtTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -54,7 +56,7 @@ public static Iterable parameters() { suppliers, "SqrtUnsignedLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, - ul -> Math.sqrt(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + ul -> Math.sqrt(ul == null ? 
null : unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, UNSIGNED_LONG_MAX, List.of() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java index b1070cb7eb12b..c6c8826c6805a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java @@ -25,6 +25,7 @@ import java.util.function.Supplier; import java.util.stream.DoubleStream; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.hamcrest.Matchers.equalTo; public class MvAvgTests extends AbstractMultivalueFunctionTestCase { @@ -53,7 +54,7 @@ public static Iterable parameters() { * So we have to go back to encoded `long` and then convert to double * using the production conversion. That'll round in the same way. */ - (size, data) -> avg.apply(size, data.mapToDouble(v -> NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(v)))) + (size, data) -> avg.apply(size, data.mapToDouble(v -> unsignedLongToDouble(NumericUtils.asLongUnsigned(v)))) ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index 97b8a95289c7d..478e45167b859 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -47,30 +46,6 @@ protected Expression build(Source source, List args) { return new MvSort(source, args.get(0), args.size() > 1 ? args.get(1) : null); } - /** - * Override to create the second argument as a Literal instead of a FieldAttribute. - */ - @Override - protected Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { - List args = new ArrayList<>(2); - List data = testCase.getData(); - args.add(AbstractFunctionTestCase.field(data.get(0).name(), data.get(0).type())); - args.add(new Literal(Source.synthetic(data.get(1).name()), data.get(1).data(), data.get(1).type())); - return build(testCase.getSource(), args); - } - - /** - * Override to create the second argument as a Literal instead of a FieldAttribute. 
- */ - @Override - protected Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { - List args = new ArrayList<>(2); - List data = testCase.getData(); - args.add(AbstractFunctionTestCase.deepCopyOfField(data.get(0).name(), data.get(0).type())); - args.add(new Literal(Source.synthetic(data.get(1).name()), data.get(1).data(), data.get(1).type())); - return build(testCase.getSource(), args); - } - private static void booleans(List suppliers) { suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD), () -> { List field = randomList(1, 10, () -> randomBoolean()); @@ -78,7 +53,7 @@ private static void booleans(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BOOLEAN + "[field=Attribute[channel=0], order=true]", DataTypes.BOOLEAN, @@ -95,7 +70,7 @@ private static void ints(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.INTEGER, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.INT + "[field=Attribute[channel=0], order=false]", DataTypes.INTEGER, @@ -111,7 +86,7 @@ private static void longs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.LONG, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=true]", DataTypes.LONG, @@ -125,7 +100,7 @@ private static void longs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.DATETIME, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=false]", DataTypes.DATETIME, @@ -141,7 +116,7 @@ private static void doubles(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.DOUBLE, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.DOUBLE + "[field=Attribute[channel=0], order=true]", DataTypes.DOUBLE, @@ -157,7 +132,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=false]", DataTypes.KEYWORD, @@ -171,7 +146,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.TEXT, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + 
"[field=Attribute[channel=0], order=true]", DataTypes.TEXT, @@ -185,7 +160,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.IP, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=false]", DataTypes.IP, @@ -199,7 +174,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.VERSION, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=true]", DataTypes.VERSION, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java new file mode 100644 index 0000000000000..e36d92fecd81f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import joptsimple.internal.Strings; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; + +@FunctionName("st_intersects") + +public class SpatialIntersectsTests extends AbstractFunctionTestCase { + public SpatialIntersectsTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + addSpatialCombinations(suppliers, 
geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialIntersects(source, args.get(0), args.get(1)); + } + + private static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { + for (DataType leftType : dataTypes) { + TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); + for (DataType rightType : dataTypes) { + if (typeCompatible(leftType, rightType)) { + TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); + suppliers.add( + TestCaseSupplier.testCaseSupplier( + leftDataSupplier, + rightDataSupplier, + SpatialIntersectsTests::spatialEvaluatorString, + DataTypes.BOOLEAN, + (l, r) -> expected(l, leftType, r, rightType) + ) + ); + } + } + } + } + + /** + * Build the expected error message for an invalid type signature. + */ + protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { + List badArgPositions = new ArrayList<>(); + for (int i = 0; i < types.size(); i++) { + if (validPerPosition.get(i).contains(types.get(i)) == false) { + badArgPositions.add(i); + } + } + if (badArgPositions.size() == 0) { + return oneInvalid(1, 0, includeOrdinal, types); + } else if (badArgPositions.size() == 1) { + int badArgPosition = badArgPositions.get(0); + int goodArgPosition = badArgPosition == 0 ? 1 : 0; + if (isSpatial(types.get(goodArgPosition)) == false) { + return oneInvalid(badArgPosition, -1, includeOrdinal, types); + } else { + return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); + } + } else { + return oneInvalid(0, -1, includeOrdinal, types); + } + } + + private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { + String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; + String expectedType = goodArgPosition >= 0 + ? 
compatibleTypes(types.get(goodArgPosition)) + : "geo_point, cartesian_point, geo_shape or cartesian_shape"; + String name = types.get(badArgPosition).typeName(); + return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; + } + + private static String compatibleTypes(DataType spatialDataType) { + return Strings.join(compatibleTypeNames(spatialDataType), " or "); + } + + private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { + return switch (dataType.esType()) { + case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); + case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); + case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); + case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); + default -> throw new IllegalArgumentException("Unsupported datatype for ST_INTERSECTS: " + dataType); + }; + } + + private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { + if (typeCompatible(leftType, rightType) == false) { + return null; + } + // TODO cast objects to right type and check intersection + BytesRef leftWKB = asGeometryWKB(left, leftType); + BytesRef rightWKB = asGeometryWKB(right, rightType); + SpatialRelatesFunction.SpatialRelations spatialIntersects = spatialRelations(left, leftType, right, rightType); + try { + return spatialIntersects.geometryRelatesGeometry(leftWKB, rightWKB); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static SpatialRelatesFunction.SpatialRelations spatialRelations( + Object left, + DataType leftType, + Object right, + DataType rightType + ) { + if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { + return SpatialIntersects.GEO; + } else if (isSpatial(leftType) || isSpatial(rightType)) { + return SpatialIntersects.CARTESIAN; + } else { + throw new IllegalArgumentException( + "Unsupported left and right types: left[" + + leftType.esType() + + ":" + + left.getClass().getSimpleName() + + "] right[" + + rightType.esType() + + ":" + + right.getClass().getSimpleName() + + "]" + ); + } + } + + private static BytesRef asGeometryWKB(Object object, DataType dataType) { + if (isString(dataType)) { + return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); + } else if (object instanceof BytesRef wkb) { + return wkb; + } else { + throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); + } + } + + private static boolean typeCompatible(DataType leftType, DataType rightType) { + if (isSpatial(leftType) && isSpatial(rightType)) { + // Both must be GEO_* or both must be CARTESIAN_* + return countGeo(leftType, rightType) != 1; + } + return true; + } + + private static DataType pickSpatialType(DataType leftType, DataType rightType) { + if (isSpatial(leftType)) { + return leftType; + } else if (isSpatial(rightType)) { + return rightType; + } else { + throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); + } + } + + private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; + return "SpatialIntersects" + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + } + + private static int countGeo(DataType... 
types) { + int count = 0; + for (DataType type : types) { + if (isSpatialGeo(type)) { + count++; + } + } + return count; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java new file mode 100644 index 0000000000000..da8af4e57636c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; + +public class RLikeTests extends AbstractFunctionTestCase { + public RLikeTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + return parameters(str -> { + for (String syntax : new String[] { "\\", ".", "?", "+", "*", "|", "{", "}", "[", "]", "(", ")", "\"", "<", ">" }) { + str = str.replace(syntax, "\\" + syntax); + } + return str; + }, () -> randomAlphaOfLength(1) + "?"); + } + + static Iterable<Object[]> parameters(Function<String, String> escapeString, Supplier<String> optionalPattern) { + List<TestCaseSupplier> cases = new ArrayList<>(); + cases.add( + new TestCaseSupplier( + "null", + List.of(DataTypes.NULL, DataTypes.KEYWORD, DataTypes.BOOLEAN), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(null, DataTypes.NULL, "e"), + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + "LiteralsEvaluator[lit=null]", + DataTypes.BOOLEAN, + nullValue() + ) + ) + ); + casesForString(cases, "empty string", () -> "", false, escapeString, optionalPattern); + casesForString(cases, "single ascii character", () -> randomAlphaOfLength(1), true, escapeString, optionalPattern); + casesForString(cases, "ascii string", () -> randomAlphaOfLengthBetween(2, 100), true, escapeString, optionalPattern); + casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, escapeString, optionalPattern); + casesForString(cases, "6 bytes, 2 
code points", () -> "❗️", false, escapeString, optionalPattern); + casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, escapeString, optionalPattern); + for (DataType type : EsqlDataTypes.types()) { + if (type == DataTypes.KEYWORD || type == DataTypes.TEXT || type == DataTypes.NULL) { + continue; + } + if (EsqlDataTypes.isRepresentable(type) == false) { + continue; + } + cases.add( + new TestCaseSupplier( + List.of(type, DataTypes.KEYWORD, DataTypes.BOOLEAN), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "e"), + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern") + .forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + "argument of [] must be [string], found value [e] type [" + type.typeName() + "]" + ) + ) + ); + } + return parameterSuppliersFromTypedData(cases); + } + + record TextAndPattern(String text, String pattern) {} + + private static void casesForString( + List<TestCaseSupplier> cases, + String title, + Supplier<String> textSupplier, + boolean canGenerateDifferent, + Function<String, String> escapeString, + Supplier<String> optionalPattern + ) { + cases(cases, title + " matches self", () -> { + String text = textSupplier.get(); + return new TextAndPattern(text, escapeString.apply(text)); + }, true); + cases(cases, title + " doesn't match self with trailing", () -> { + String text = textSupplier.get(); + return new TextAndPattern(text, escapeString.apply(text) + randomAlphaOfLength(1)); + }, false); + cases(cases, title + " matches self with optional trailing", () -> { + String text = randomAlphaOfLength(1); + return new TextAndPattern(text, escapeString.apply(text) + optionalPattern.get()); + }, true); + if (canGenerateDifferent) { + cases(cases, title + " doesn't match different", () -> { + String text = textSupplier.get(); + String different = escapeString.apply(randomValueOtherThan(text, textSupplier)); + return new TextAndPattern(text, different); + }, false); + } + } + + private static void cases(List<TestCaseSupplier> cases, String title, Supplier<TextAndPattern> textAndPattern, boolean expected) { + for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }) { + cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, type, DataTypes.BOOLEAN), () -> { + TextAndPattern v = textAndPattern.get(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(v.text), type, "e"), + new TestCaseSupplier.TypedData(new BytesRef(v.pattern), type, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), + DataTypes.BOOLEAN, + equalTo(expected) + ); + })); + } + } + + @Override + protected void assertSimpleWithNulls(List<Object> data, Block value, int nullBlock) { + assumeFalse("generated test cases containing nulls by hand", true); + } + + @Override + protected Expression build(Source source, List<Expression> args) { + Expression expression = args.get(0); + Literal pattern = (Literal) args.get(1); + Literal caseInsensitive = (Literal) args.get(2); + String patternString = ((BytesRef) pattern.fold()).utf8ToString(); + boolean caseInsensitiveBool = (boolean) caseInsensitive.fold(); + logger.info("pattern={} caseInsensitive={}", patternString, caseInsensitiveBool); + return new RLike(source, expression, new 
RLikePattern(patternString), caseInsensitiveBool); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java new file mode 100644 index 0000000000000..6377be0655614 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class WildcardLikeTests extends AbstractFunctionTestCase { + public WildcardLikeTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + return RLikeTests.parameters(str -> { + for (String syntax : new String[] { "\\", ".", "*" }) { + str = str.replace(syntax, "\\" + syntax); + } + return str; + }, () -> "*"); + } + + @Override + protected void assertSimpleWithNulls(List<Object> data, Block value, int nullBlock) { + assumeFalse("generated test cases containing nulls by hand", true); + } + + @Override + protected Expression build(Source source, List<Expression> args) { + Expression expression = args.get(0); + Literal pattern = (Literal) args.get(1); + Literal caseInsensitive = (Literal) args.get(2); + assertThat(caseInsensitive.fold(), equalTo(false)); + return new WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold()).utf8ToString())); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 6370b0198ae88..a30418c69f0f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -54,8 +54,8 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForExistingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; -import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests.getFieldAttribute; import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests.greaterThanOf; +import static 
org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index f2bce6951151e..3f0b39603ef89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -24,8 +24,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; @@ -59,7 +57,9 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -112,9 +112,11 @@ import org.junit.BeforeClass; import java.lang.reflect.Constructor; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -132,6 +134,7 @@ import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; import static org.elasticsearch.xpack.ql.expression.Literal.NULL; @@ -172,6 +175,19 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Analyzer analyzerAirports; private static EnrichResolution enrichResolution; + private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { + static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + + SubstitutionOnlyOptimizer(LogicalOptimizerContext optimizerContext) { + super(optimizerContext); + } + + @Override + protected List> batches() { + return List.of(substitutions()); + } + } + @BeforeClass public static void init() { parser = new EsqlParser(); 
@@ -316,6 +332,39 @@ public void testQlComparisonOptimizationsApply() { assertThat(con.value(), equalTo(5)); } + public void testCombineDisjunctionToInEquals() { + LogicalPlan plan = plan(""" + from test + | where emp_no == 1 or emp_no == 2 + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + + public void testCombineDisjunctionToInMixed() { + LogicalPlan plan = plan(""" + from test + | where emp_no == 1 or emp_no in (2) + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + + public void testCombineDisjunctionToInFromIn() { + LogicalPlan plan = plan(""" + from test + | where emp_no in (1) or emp_no in (2) + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + public void testCombineProjectionWithPruning() { var plan = plan(""" from test @@ -3238,6 +3287,177 @@ public void testStatsWithCanonicalAggregate() throws Exception { assertThat(Expressions.attribute(fields.get(1)), is(Expressions.attribute(sum_argument))); } + /** + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7, w{r}#10]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7, w{r}#10]] + * \_Eval[[MVSUM([1, 2][INTEGER]) * $$COUNT$s$0{r}#25 AS s, MVSUM(314.0[DOUBLE] / 100[INTEGER]) * $$COUNT$s$0{r}#25 AS s + * _expr, MVSUM(null[NULL]) * $$COUNT$s$0{r}#25 AS s_null]] + * \_Aggregate[[w{r}#10],[COUNT(*[KEYWORD]) AS $$COUNT$s$0, w{r}#10]] + * \_Eval[[emp_no{f}#15 % 2[INTEGER] AS w]] + * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+ */ + public void testSumOfLiteral() { + var plan = plan(""" + from test + | stats s = sum([1,2]), + s_expr = sum(314.0/100), + s_null = sum(null) + by w = emp_no % 2 + | keep s, s_expr, s_null, w + """, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var agg = as(eval.child(), Aggregate.class); + + var exprs = eval.fields(); + // s = count(*) * 3 + var s = as(exprs.get(0), Alias.class); + assertThat(s.name(), equalTo("s")); + var mul = as(s.child(), Mul.class); + var mvSum = as(mul.left(), MvSum.class); + assertThat(mvSum.fold(), equalTo(3)); + var count = as(mul.right(), ReferenceAttribute.class); + assertThat(count.name(), equalTo("$$COUNT$s$0")); + + // s_expr = count(*) * 3.14 + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.name(), equalTo("s_expr")); + var mul_expr = as(s_expr.child(), Mul.class); + var mvSum_expr = as(mul_expr.left(), MvSum.class); + assertThat(mvSum_expr.fold(), equalTo(3.14)); + var count_expr = as(mul_expr.right(), ReferenceAttribute.class); + assertThat(count_expr.name(), equalTo("$$COUNT$s$0")); + + // s_null = null + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.name(), equalTo("s_null")); + var mul_null = as(s_null.child(), Mul.class); + var mvSum_null = as(mul_null.left(), MvSum.class); + assertThat(mvSum_null.field(), equalTo(NULL)); + var count_null = as(mul_null.right(), ReferenceAttribute.class); + assertThat(count_null.name(), equalTo("$$COUNT$s$0")); + + var count_agg = as(Alias.unwrap(agg.aggregates().get(0)), Count.class); + assertThat(count_agg.children().get(0), instanceOf(Literal.class)); + var w = as(Alias.unwrap(agg.groupings().get(0)), ReferenceAttribute.class); + assertThat(w.name(), equalTo("w")); + } + + private record AggOfLiteralTestCase( + String aggFunctionName, + Class substitution, + Function aggMultiValue + ) {}; + + private static List AGG_OF_CONST_CASES = List.of( + new AggOfLiteralTestCase("avg", MvAvg.class, ints -> ((double) Arrays.stream(ints).sum()) / ints.length), + new AggOfLiteralTestCase("min", MvMin.class, ints -> Arrays.stream(ints).min().getAsInt()), + new AggOfLiteralTestCase("max", MvMax.class, ints -> Arrays.stream(ints).max().getAsInt()) + ); + + /** + * Aggs of literals in case that the agg can be simply replaced by a corresponding mv-function; + * e.g. avg([1,2,3]) which is equivalent to mv_avg([1,2,3]). 
+ * + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7]] + * \_Eval[[MVAVG([1, 2][INTEGER]) AS s, MVAVG(314.0[DOUBLE] / 100[INTEGER]) AS s_expr, MVAVG(null[NULL]) AS s_null]] + * \_LocalRelation[[{e}#21],[ConstantNullBlock[positions=1]]] + */ + public void testAggOfLiteral() { + for (AggOfLiteralTestCase testCase : AGG_OF_CONST_CASES) { + String query = LoggerMessageFormat.format(null, """ + from test + | stats s = {}([1,2]), + s_expr = {}(314.0/100), + s_null = {}(null) + | keep s, s_expr, s_null + """, testCase.aggFunctionName, testCase.aggFunctionName, testCase.aggFunctionName); + + var plan = plan(query, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var singleRowRelation = as(eval.child(), LocalRelation.class); + var singleRow = singleRowRelation.supplier().get(); + assertThat(singleRow.length, equalTo(1)); + assertThat(singleRow[0].getPositionCount(), equalTo(1)); + + var exprs = eval.fields(); + var s = as(exprs.get(0), Alias.class); + assertThat(s.child(), instanceOf(testCase.substitution)); + assertThat(s.child().fold(), equalTo(testCase.aggMultiValue.apply(new int[] { 1, 2 }))); + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.child(), instanceOf(testCase.substitution)); + assertThat(s_expr.child().fold(), equalTo(3.14)); + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.child(), instanceOf(testCase.substitution)); + assertThat(s_null.child().fold(), equalTo(null)); + } + } + + /** + * Like {@link LogicalPlanOptimizerTests#testAggOfLiteral()} but with a grouping key. + * + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7, emp_no{f}#13]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7, emp_no{f}#13]] + * \_Eval[[MVAVG([1, 2][INTEGER]) AS s, MVAVG(314.0[DOUBLE] / 100[INTEGER]) AS s_expr, MVAVG(null[NULL]) AS s_null]] + * \_Aggregate[[emp_no{f}#13],[emp_no{f}#13]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] 
+ */ + public void testAggOfLiteralGrouped() { + for (AggOfLiteralTestCase testCase : AGG_OF_CONST_CASES) { + String query = LoggerMessageFormat.format(null, """ + from test + | stats s = {}([1,2]), + s_expr = {}(314.0/100), + s_null = {}(null) + by emp_no + | keep s, s_expr, s_null, emp_no + """, testCase.aggFunctionName, testCase.aggFunctionName, testCase.aggFunctionName); + + var plan = plan(query, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var agg = as(eval.child(), Aggregate.class); + assertThat(agg.child(), instanceOf(EsRelation.class)); + + // Assert exprs + var exprs = eval.fields(); + + var s = as(exprs.get(0), Alias.class); + assertThat(s.child(), instanceOf(testCase.substitution)); + assertThat(s.child().fold(), equalTo(testCase.aggMultiValue.apply(new int[] { 1, 2 }))); + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.child(), instanceOf(testCase.substitution)); + assertThat(s_expr.child().fold(), equalTo(3.14)); + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.child(), instanceOf(testCase.substitution)); + assertThat(s_null.child().fold(), equalTo(null)); + + // Assert that the aggregate only does the grouping by emp_no + assertThat(Expressions.names(agg.groupings()), contains("emp_no")); + assertThat(agg.aggregates().size(), equalTo(1)); + } + } + public void testEmptyMappingIndex() { EsIndex empty = new EsIndex("empty_test", emptyMap(), emptySet()); IndexResolution getIndexResultAirports = IndexResolution.valid(empty); @@ -3265,7 +3485,6 @@ public void testEmptyMappingIndex() { assertThat(Expressions.names(local.output()), contains(NO_FIELDS.get(0).name(), "x", "language_code", "language_name")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105436") public void testPlanSanityCheck() throws Exception { var plan = optimizedPlan(""" from test @@ -3291,7 +3510,7 @@ public void testPlanSanityCheck() throws Exception { ) ); - VerificationException e = expectThrows(VerificationException.class, () -> logicalOptimizer.optimize(invalidPlan)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(invalidPlan)); assertThat(e.getMessage(), containsString("Plan [OrderBy[[Order[salary")); assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } @@ -3422,9 +3641,13 @@ private LogicalPlan optimizedPlan(String query) { } private LogicalPlan plan(String query) { + return plan(query, logicalOptimizer); + } + + private LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) { var analyzed = analyzer.analyze(parser.createStatement(query)); // System.out.println(analyzed); - var optimized = logicalOptimizer.optimize(analyzed); + var optimized = optimizer.optimize(analyzed); // System.out.println(optimized); return optimized; } @@ -3442,15 +3665,6 @@ private void assertNullLiteral(Expression expression) { assertNull(expression.fold()); } - // TODO: move these from org.elasticsearch.xpack.ql.optimizer.OptimizerRulesTests to org.elasticsearch.xpack.ql.TestUtils - public static FieldAttribute getFieldAttribute(String name) { - return getFieldAttribute(name, INTEGER); - } - - private static FieldAttribute getFieldAttribute(String name, DataType dataType) { - return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), 
true)); - } - public static WildcardLike wildcardLike(Expression left, String exp) { return new WildcardLike(EMPTY, left, new WildcardPattern(exp)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java new file mode 100644 index 0000000000000..01fcd222a5141 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -0,0 +1,541 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; +import org.elasticsearch.xpack.ql.TestUtils; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.Predicates; +import org.elasticsearch.xpack.ql.expression.predicate.Range; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; + +import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.ql.TestUtils.rangeOf; +import static org.elasticsearch.xpack.ql.TestUtils.relation; +import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; +import static org.elasticsearch.xpack.ql.expression.Literal.NULL; +import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; +import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.hamcrest.Matchers.contains; + +public class OptimizerRulesTests extends ESTestCase { + private static final Literal ONE = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); + private static final Literal TWO = new Literal(Source.EMPTY, 2, DataTypes.INTEGER); + private static final Literal THREE = new Literal(Source.EMPTY, 3, DataTypes.INTEGER); + private static final Literal FOUR = new Literal(Source.EMPTY, 4, DataTypes.INTEGER); + private static final Literal FIVE = new Literal(Source.EMPTY, 5, DataTypes.INTEGER); + + private static Equals equalsOf(Expression left, Expression right) { + 
return new Equals(EMPTY, left, right, null); + } + + private static LessThan lessThanOf(Expression left, Expression right) { + return new LessThan(EMPTY, left, right, null); + } + + public static GreaterThan greaterThanOf(Expression left, Expression right) { + return new GreaterThan(EMPTY, left, right, randomZone()); + } + + public static NotEquals notEqualsOf(Expression left, Expression right) { + return new NotEquals(EMPTY, left, right, randomZone()); + } + + public static NullEquals nullEqualsOf(Expression left, Expression right) { + return new NullEquals(EMPTY, left, right, randomZone()); + } + + public static LessThanOrEqual lessThanOrEqualOf(Expression left, Expression right) { + return new LessThanOrEqual(EMPTY, left, right, randomZone()); + } + + public static GreaterThanOrEqual greaterThanOrEqualOf(Expression left, Expression right) { + return new GreaterThanOrEqual(EMPTY, left, right, randomZone()); + } + + private static FieldAttribute getFieldAttribute() { + return TestUtils.getFieldAttribute("a"); + } + + // + // CombineDisjunction in Equals + // + public void testTwoEqualsWithOr() { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testTwoEqualsWithSameValue() { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals(ONE, eq.right()); + } + + public void testOneEqualsOneIn() { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testOneEqualsOneInWithSameValue() { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testSingleValueInToEquals() { + FieldAttribute fa = getFieldAttribute(); + + Equals equals = equalsOf(fa, ONE); + Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(equals, e); + } + + public void testEqualsBehindAnd() { + FieldAttribute fa = getFieldAttribute(); + + And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); + Filter dummy = new Filter(EMPTY, relation(), and); + LogicalPlan transformed = new OptimizerRules.CombineDisjunctionsToIn().apply(dummy); + assertSame(dummy, transformed); + assertEquals(and, ((Filter) transformed).condition()); + } + + public void testTwoEqualsDifferentFields() { + FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); + FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); + + Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); + Expression e = new 
OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testMultipleIn() { + FieldAttribute fa = getFieldAttribute(); + + Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); + Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO, THREE)); + } + + public void testOrWithNonCombinableExpressions() { + FieldAttribute fa = getFieldAttribute(); + + Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); + Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + assertEquals(Or.class, e.getClass()); + Or or = (Or) e; + assertEquals(or.left(), firstOr.right()); + assertEquals(In.class, or.right().getClass()); + In in = (In) or.right(); + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, THREE)); + } + + // Test BooleanFunctionEqualsElimination + public void testBoolEqualsSimplificationOnExpressions() { + OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataTypes.INTEGER), null); + + assertEquals(exp, s.rule(new Equals(EMPTY, exp, TRUE))); + // TODO: Replace use of QL Not with ESQL Not + assertEquals(new Not(EMPTY, exp), s.rule(new Equals(EMPTY, exp, FALSE))); + } + + public void testBoolEqualsSimplificationOnFields() { + OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + + FieldAttribute field = getFieldAttribute(); + + List comparisons = asList( + new Equals(EMPTY, field, TRUE), + new Equals(EMPTY, field, FALSE), + notEqualsOf(field, TRUE), + notEqualsOf(field, FALSE), + new Equals(EMPTY, NULL, TRUE), + new Equals(EMPTY, NULL, FALSE), + notEqualsOf(NULL, TRUE), + notEqualsOf(NULL, FALSE) + ); + + for (BinaryComparison comparison : comparisons) { + assertEquals(comparison, s.rule(comparison)); + } + } + + // Test Propagate Equals + + // a == 1 AND a == 2 -> FALSE + public void testDualEqualsConjunction() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, ONE); + Equals eq2 = equalsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); + assertEquals(FALSE, exp); + } + + // a <=> 1 AND a <=> 2 -> FALSE + public void testDualNullEqualsConjunction() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, ONE); + NullEquals eq2 = nullEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); + assertEquals(FALSE, exp); + } + + // 1 < a < 10 AND a == 10 -> FALSE + public void testEliminateRangeByEqualsOutsideInterval() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataTypes.INTEGER)); + Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(FALSE, exp); + } + + // 1 < a < 10 AND a <=> 10 -> FALSE + public 
void testEliminateRangeByNullEqualsOutsideInterval() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, new Literal(EMPTY, 10, DataTypes.INTEGER)); + Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(FALSE, exp); + } + + // a != 3 AND a = 3 -> FALSE + public void testPropagateEquals_VarNeq3AndVarEq3() { + FieldAttribute fa = getFieldAttribute(); + NotEquals neq = notEqualsOf(fa, THREE); + Equals eq = equalsOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, neq, eq)); + assertEquals(FALSE, exp); + } + + // a != 4 AND a = 3 -> a = 3 + public void testPropagateEquals_VarNeq4AndVarEq3() { + FieldAttribute fa = getFieldAttribute(); + NotEquals neq = notEqualsOf(fa, FOUR); + Equals eq = equalsOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, neq, eq)); + assertEquals(Equals.class, exp.getClass()); + assertEquals(eq, exp); + } + + // a = 2 AND a < 2 -> FALSE + public void testPropagateEquals_VarEq2AndVarLt2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a <= 2 -> a = 2 + public void testPropagateEquals_VarEq2AndVarLte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(eq, exp); + } + + // a = 2 AND a <= 1 -> FALSE + public void testPropagateEquals_VarEq2AndVarLte1() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a > 2 -> FALSE + public void testPropagateEquals_VarEq2AndVarGt2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a >= 2 -> a = 2 + public void testPropagateEquals_VarEq2AndVarGte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gte)); + assertEquals(eq, exp); + } + + // a = 2 AND a > 3 -> FALSE + public void testPropagateEquals_VarEq2AndVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a < 3 AND a > 1 AND a != 4 -> a = 2 + public void 
testPropagateEquals_VarEq2AndVarLt3AndVarGt1AndVarNeq4() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, THREE); + GreaterThan gt = greaterThanOf(fa, ONE); + NotEquals neq = notEqualsOf(fa, FOUR); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression and = Predicates.combineAnd(asList(eq, lt, gt, neq)); + Expression exp = rule.rule((And) and); + assertEquals(eq, exp); + } + + // a = 2 AND 1 < a < 3 AND a > 0 AND a != 4 -> a = 2 + public void testPropagateEquals_VarEq2AndVarRangeGt1Lt3AndVarGt0AndVarNeq4() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, ONE, false, THREE, false); + GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataTypes.INTEGER)); + NotEquals neq = notEqualsOf(fa, FOUR); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); + Expression exp = rule.rule((And) and); + assertEquals(eq, exp); + } + + // a = 2 OR a > 1 -> a > 1 + public void testPropagateEquals_VarEq2OrVarGt1() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, ONE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + assertEquals(gt, exp); + } + + // a = 2 OR a > 2 -> a >= 2 + public void testPropagateEquals_VarEq2OrVarGte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + assertEquals(GreaterThanOrEqual.class, exp.getClass()); + GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; + assertEquals(TWO, gte.right()); + } + + // a = 2 OR a < 3 -> a < 3 + public void testPropagateEquals_VarEq2OrVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + assertEquals(lt, exp); + } + + // a = 3 OR a < 3 -> a <= 3 + public void testPropagateEquals_VarEq3OrVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, THREE); + LessThan lt = lessThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + assertEquals(LessThanOrEqual.class, exp.getClass()); + LessThanOrEqual lte = (LessThanOrEqual) exp; + assertEquals(THREE, lte.right()); + } + + // a = 2 OR 1 < a < 3 -> 1 < a < 3 + public void testPropagateEquals_VarEq2OrVarRangeGt1Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, ONE, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + assertEquals(range, exp); + } + + // a = 2 OR 2 < a < 3 -> 2 <= a < 3 + public void testPropagateEquals_VarEq2OrVarRangeGt2Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, TWO, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + 
assertEquals(Range.class, exp.getClass()); + Range r = (Range) exp; + assertEquals(TWO, r.lower()); + assertTrue(r.includeLower()); + assertEquals(THREE, r.upper()); + assertFalse(r.includeUpper()); + } + + // a = 3 OR 2 < a < 3 -> 2 < a <= 3 + public void testPropagateEquals_VarEq3OrVarRangeGt2Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, THREE); + Range range = rangeOf(fa, TWO, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + assertEquals(Range.class, exp.getClass()); + Range r = (Range) exp; + assertEquals(TWO, r.lower()); + assertFalse(r.includeLower()); + assertEquals(THREE, r.upper()); + assertTrue(r.includeUpper()); + } + + // a = 2 OR a != 2 -> TRUE + public void testPropagateEquals_VarEq2OrVarNeq2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + assertEquals(TRUE, exp); + } + + // a = 2 OR a != 5 -> a != 5 + public void testPropagateEquals_VarEq2OrVarNeq5() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, FIVE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + assertEquals(NotEquals.class, exp.getClass()); + NotEquals ne = (NotEquals) exp; + assertEquals(FIVE, ne.right()); + } + + // a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE + public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { + FieldAttribute fa = getFieldAttribute(); + org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, THREE, false, FOUR, false); + GreaterThan gt = greaterThanOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); + assertEquals(TRUE, exp); + } + + // a == 1 AND a == 2 -> nop for date/time fields + public void testPropagateEquals_ignoreDateTimeFields() { + FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); + Equals eq1 = equalsOf(fa, ONE); + Equals eq2 = equalsOf(fa, TWO); + And and = new And(EMPTY, eq1, eq2); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(and); + assertEquals(and, exp); + } + + // 1 <= a < 10 AND a == 1 -> a == 1 + public void testEliminateRangeByEqualsInInterval() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, ONE); + Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(eq1, exp); + } + + // 1 <= a < 10 AND a <=> 1 -> a <=> 1 + public void testEliminateRangeByNullEqualsInInterval() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, ONE); + Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(eq1, exp); + } +} diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 80b9c07095ee9..650cd2c81115c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -9,11 +9,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -40,6 +42,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -68,6 +71,7 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; +import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -82,6 +86,7 @@ import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -95,6 +100,7 @@ import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; +import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -103,7 +109,6 @@ import static java.util.Arrays.asList; import static org.elasticsearch.core.Tuple.tuple; -import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -142,14 +147,17 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { private LogicalPlanOptimizer logicalOptimizer; private PhysicalPlanOptimizer physicalPlanOptimizer; private Mapper mapper; - private Map mapping; - private Analyzer analyzer; - private int allFieldRowSize; - private static Map mappingAirports; - private 
static Analyzer analyzerAirports; + private TestDataSource testData; + private int allFieldRowSize; // TODO: Move this into testDataSource so tests that load other indexes can also assert on this + private TestDataSource airports; + private TestDataSource airportsWeb; + private TestDataSource countriesBbox; + private TestDataSource countriesBboxWeb; private final EsqlConfiguration config; + private record TestDataSource(Map mapping, EsIndex index, Analyzer analyzer) {} + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) public static List readScriptSpec() { return settings().stream().map(t -> { @@ -173,13 +181,10 @@ public void init() { physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); - // Most tests used data from the test index, so we load it here, and use it in the plan() function. - mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping, Set.of("test")); - IndexResolution getIndexResult = IndexResolution.valid(test); var enrichResolution = setupEnrichResolution(); - analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); - allFieldRowSize = mapping.values() + // Most tests used data from the test index, so we load it here, and use it in the plan() function. + this.testData = makeTestDataSource("test", "mapping-basic.json", functionRegistry, enrichResolution); + allFieldRowSize = testData.mapping.values() .stream() .mapToInt( f -> (EstimatesRowSize.estimateSize(EsqlDataTypes.widenSmallNumericTypes(f.getDataType())) + f.getProperties() @@ -191,16 +196,31 @@ public void init() { ) .sum(); - // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. - mappingAirports = loadMapping("mapping-airports.json"); - EsIndex airports = new EsIndex("airports", mappingAirports, Set.of("airports")); - IndexResolution getIndexResultAirports = IndexResolution.valid(airports); - analyzerAirports = new Analyzer( - new AnalyzerContext(config, functionRegistry, getIndexResultAirports, enrichResolution), - TEST_VERIFIER + // Some tests use data from the airports and countries indexes, so we load that here, and use it in the plan(q, airports) function. 
+ this.airports = makeTestDataSource("airports", "mapping-airports.json", functionRegistry, enrichResolution); + this.airportsWeb = makeTestDataSource("airports_web", "mapping-airports_web.json", functionRegistry, enrichResolution); + this.countriesBbox = makeTestDataSource("countriesBbox", "mapping-countries_bbox.json", functionRegistry, enrichResolution); + this.countriesBboxWeb = makeTestDataSource( + "countriesBboxWeb", + "mapping-countries_bbox_web.json", + functionRegistry, + enrichResolution ); } + TestDataSource makeTestDataSource( + String indexName, + String mappingFileName, + FunctionRegistry functionRegistry, + EnrichResolution enrichResolution + ) { + Map mapping = loadMapping(mappingFileName); + EsIndex index = new EsIndex(indexName, mapping, Set.of("test")); + IndexResolution getIndexResult = IndexResolution.valid(index); + Analyzer analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); + return new TestDataSource(mapping, index, analyzer); + } + private static EnrichResolution setupEnrichResolution() { EnrichResolution enrichResolution = new EnrichResolution(); enrichResolution.addResolvedPolicy( @@ -319,7 +339,10 @@ public void testSingleFieldExtractor() { var filter = as(limit.child(), FilterExec.class); var extract = as(filter.child(), FieldExtractExec.class); - assertEquals(Sets.difference(allFields(mapping), Set.of("emp_no")), Sets.newHashSet(names(restExtract.attributesToExtract()))); + assertEquals( + Sets.difference(allFields(testData.mapping), Set.of("emp_no")), + Sets.newHashSet(names(restExtract.attributesToExtract())) + ); assertEquals(Set.of("emp_no"), Sets.newHashSet(names(extract.attributesToExtract()))); var query = as(extract.child(), EsQueryExec.class); @@ -355,7 +378,10 @@ public void testExactlyOneExtractorPerFieldWithPruning() { var filter = as(limit.child(), FilterExec.class); var extract = as(filter.child(), FieldExtractExec.class); - assertEquals(Sets.difference(allFields(mapping), Set.of("emp_no")), Sets.newHashSet(names(restExtract.attributesToExtract()))); + assertEquals( + Sets.difference(allFields(testData.mapping), Set.of("emp_no")), + Sets.newHashSet(names(restExtract.attributesToExtract())) + ); assertThat(names(extract.attributesToExtract()), contains("emp_no")); var query = source(extract.child()); @@ -2253,10 +2279,10 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValues() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(location) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2278,13 +2304,12 @@ public void testSpatialTypesAndStatsUseDocValues() { agg = as(exchange.child(), AggregateExec.class); // below the exchange (in data node) the aggregation is using doc-values assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); - var filter = as(agg.child(), FilterExec.class); - var extract = as(filter.child(), FieldExtractExec.class); + var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = 
PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2311,10 +2336,10 @@ public void testSpatialTypesAndStatsUseDocValues() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesNested() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(to_geopoint(location)) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2351,6 +2376,8 @@ public void testSpatialTypesAndStatsUseDocValuesNested() { } /** + * This test does not have real index fields, and therefor asserts that doc-values field extraction does NOT occur. + * * Before local optimizations: * * LimitExec[1000[INTEGER]] @@ -2370,10 +2397,10 @@ public void testSpatialTypesAndStatsUseDocValuesNested() { * 36 35 33 36 29][KEYWORD] AS wkt]] */ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" row wkt = "POINT(42.97109629958868 14.7552534006536)" | stats centroid = st_centroid(to_geopoint(wkt)) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2424,10 +2451,10 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(location), count = COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2456,10 +2483,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2490,10 +2517,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2525,10 +2552,10 @@ public void 
testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { assertAggregation(agg, "cities", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2555,11 +2582,11 @@ public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2589,10 +2616,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { assertAggregation(agg, "count", Count.class); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); var source = source(extract.child()); var qb = as(source.query(), SingleValueQuery.Builder.class); assertThat("Expected predicate to be passed to Lucene query", qb.source().text(), equalTo("scalerank == 9")); @@ -2623,10 +2650,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2660,10 +2687,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { assertAggregation(agg, "count", Count.class); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - 
})); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); source(extract.child()); } @@ -2693,11 +2720,11 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregated() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2753,10 +2780,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregat assertAggregation(agg, "count", Count.class); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); source(extract.child()); } @@ -2779,18 +2806,18 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregat * \_AggregateExec[[],[SPATIALCENTROID(city_location{f}#16) AS centroid],PARTIAL,50] * \_EnrichExec[ANY,geo_match,city_location{f}#16,city_boundaries,city_boundary,{=airport_city_boundaries},[airport{r}#21, * region{r}#22, city_boundary{r}#23]] - * \_FilterExec[ISNOTNULL(city_location{f}#16)] - * \_FieldExtractExec[city_location{f}#16][city_location{f}#16]> - * \_EsQueryExec[airports], query[][_doc{f}#46], limit[], sort[] estimatedRowSize[204] + * \_FieldExtractExec[city_location{f}#16][city_location{f}#16] + * \_EsQueryExec[airports], query[{"exists":{"field":"city_location","boost":1.0}}][_doc{f}#46], limit[], sort[] + * estimatedRowSize[204] * * Note the FieldExtractExec has 'city_location' set for doc-values: FieldExtractExec[city_location{f}#16][city_location{f}#16] */ public void testEnrichBeforeSpatialAggregationSupportsDocValues() { - var plan = physicalPlanAirports(""" + var plan = physicalPlan(""" from airports | enrich city_boundaries ON city_location WITH airport, region, city_boundary | stats centroid = st_centroid(city_location) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2820,13 +2847,578 @@ public void testEnrichBeforeSpatialAggregationSupportsDocValues() { assertThat(enrichExec.mode(), equalTo(Enrich.Mode.ANY)); assertThat(enrichExec.concreteIndices(), equalTo(Map.of("", "airport_city_boundaries"))); assertThat(enrichExec.enrichFields().size(), equalTo(3)); - var filter = as(enrichExec.child(), FilterExec.class); - var extract = as(filter.child(), FieldExtractExec.class); + var extract = as(enrichExec.child(), FieldExtractExec.class); + source(extract.child()); + assertTrue( + "Expect field attribute to be 
extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + } + + /** + * Plan: + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Limit[500[INTEGER]] + * \_Filter[SPATIALINTERSECTS(location{f}#7,[50 4f 4c 59 47 4f 4e 28 29][KEYWORD])] + * \_EsRelation[airports][abbrev{f}#3, city{f}#9, city_location{f}#10, countr..]]] + * + * Optimized: + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[abbrev{f}#3, city{f}#9, city_location{f}#10, country{f}#8, location{f}#7, name{f}#4, scalerank{f}#5, type{f}# + * 6]] + * \_FieldExtractExec[abbrev{f}#3, city{f}#9, city_location{f}#10, countr..][] + * \_EsQueryExec[airports], query[{ + * "esql_single_value":{ + * "field":"location", + * "next":{ + * "geo_shape":{ + * "location":{ + * "shape":{ + * "type":"Polygon", + * "coordinates":[[[42.0,14.0],[43.0,14.0],[43.0,15.0],[42.0,15.0],[42.0,14.0]]] + * }, + * "relation":"intersects" + * }, + * "ignore_unmapped":false, + * "boost":1.0 + * } + * }, + * "source":"ST_INTERSECTS(location, \"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")@2:9" + * } + * }][_doc{f}#19], limit[500], sort[] estimatedRowSize[358] + */ + public void testPushSpatialIntersectsStringToSource() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + """, """ + FROM airports + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + /** + * Plan: + * Plan: + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#16, xDel{r}#17, yVal{r}#18, yDel{r}#19, count{r}#20, count{r}#21, seen{r}#22],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count]] + * 
\_Filter[SPATIALINTERSECTS(location{f}#12,[50 4f 4c 59 47 4f 4e 28 28 34 32 20 31 34 2c 20 34 33 20 31 34 2c 20 34 33 2 + * 0 31 35 2c 20 34 32 20 31 35 2c 20 34 32 20 31 34 29 29][KEYWORD])] + * \_EsRelation[airports][abbrev{f}#8, city{f}#14, city_location{f}#15, count..]]] + * + * Optimized: + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,58] + * \_ExchangeExec[[xVal{r}#16, xDel{r}#17, yVal{r}#18, yDel{r}#19, count{r}#20, count{r}#21, seen{r}#22],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],PARTIAL,58] + * \_FieldExtractExec[location{f}#12][location{f}#12] + * \_EsQueryExec[airports], query[{ + * "esql_single_value":{ + * "field":"location", + * "next":{ + * "geo_shape":{ + * "location":{ + * "shape":{ + * "type":"Polygon", + * "coordinates":[[[42.0,14.0],[43.0,14.0],[43.0,15.0],[42.0,15.0],[42.0,14.0]]] + * }, + * "relation":"intersects" + * }, + * "ignore_unmapped":false, + * "boost":1.0 + * } + * }, + * "source":"ST_INTERSECTS(location, \"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")@2:9" + * } + * }][_doc{f}#140, limit[], sort[] estimatedRowSize[54] + */ + public void testPushSpatialIntersectsStringToSourceAndUseDocValuesForCentroid() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + | STATS centroid=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + | STATS centroid=ST_CENTROID(location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + var source = 
source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsStringToSourceCompoundPredicate() { + for (String query : new String[] { """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND type == "mid" + """, """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + AND type == "mid" + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + var and = as(filter.condition(), And.class); + var left = as(and.left(), And.class); + assertThat("filter contains ST_INTERSECTS", left.right(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of three predicates", booleanQuery.must().size(), equalTo(3)); + var condition = as(booleanQuery.must().get(1), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsStringToSourceCompoundPredicateAndUseDocValuesForCentroid() { + for (String query : new String[] { """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND type == "mid" + | STATS centroid=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + AND type == "mid" + | STATS centroid=ST_CENTROID(location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); 
+ var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + var and = as(filter.condition(), And.class); + var left = as(and.left(), And.class); + assertThat("filter contains ST_INTERSECTS", left.right(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of three predicates", booleanQuery.must().size(), equalTo(3)); + var condition = as(booleanQuery.must().get(1), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + /** + * Plan: + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#20, xDel{r}#21, yVal{r}#22, yDel{r}#23, count{r}#24, xVal{r}#25, xDel{r}#26, yVal{r}#27, yDel{r}#28, + * count{r}#29, count{r}#30, seen{r}#31],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count]] + * \_Filter[SPATIALINTERSECTS(location{f}#16,city_location{f}#19)] + * \_EsRelation[airports][abbrev{f}#12, city{f}#18, 
city_location{f}#19, coun..]]] + * + * Optimized: + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],FINAL,108] + * \_ExchangeExec[[xVal{r}#20, xDel{r}#21, yVal{r}#22, yDel{r}#23, count{r}#24, xVal{r}#25, xDel{r}#26, yVal{r}#27, yDel{r}#28, + * count{r}#29, count{r}#30, seen{r}#31],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],PARTIAL,108] + * \_FilterExec[SPATIALINTERSECTS(location{f}#16,city_location{f}#19)] + * \_FieldExtractExec[location{f}#16, city_location{f}#19][city_location{f}#19, location{f}#16] + * \_EsQueryExec[airports], query[][_doc{f}#55], limit[], sort[] estimatedRowSize[104] + */ + public void testIntersectsOnTwoPointFieldAndBothCentroidUsesDocValues() { + String query = """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + """; + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, true); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + var filterExec = as(agg.child(), FilterExec.class); + var extract = as(filterExec.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, "location"); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + } + + public void 
testIntersectsOnTwoPointFieldAndOneCentroidUsesDocValues() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS location=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS city_location=ST_CENTROID(city_location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + var aggFieldName = findSingleAggregation(agg, "location", "city_location"); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, true); + var filterExec = as(agg.child(), FilterExec.class); + var extract = as(filterExec.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, aggFieldName); + source(extract.child()); + } + } + + public void testTwoIntersectsWithTwoCentroidsUsesDocValues() { + String query = """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND ST_INTERSECTS(city_location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + """; + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + var and = as(filter.condition(), And.class); + assertThat("filter contains ST_INTERSECTS", and.left(), instanceOf(SpatialIntersects.class)); + assertThat("filter contains ST_INTERSECTS", 
and.right(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, true); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, "location", "city_location"); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of two predicates", booleanQuery.must().size(), equalTo(2)); + String[] fieldNames = new String[] { "location", "city_location" }; + for (String fieldName : fieldNames) { + var condition = as(findQueryBuilder(booleanQuery, fieldName), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo(fieldName)); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsShapeToSource() { + for (String query : new String[] { """ + FROM countriesBbox + | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + """, """ + FROM countriesBbox + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), shape) + """ }) { + + var plan = this.physicalPlan(query, countriesBbox); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), 
SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("shape")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushCartesianSpatialIntersectsToSource() { + for (String query : new String[] { """ + FROM airports_web + | WHERE ST_INTERSECTS( + location, + TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))") + ) + """, """ + FROM airports_web + | WHERE ST_INTERSECTS( + TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), + location + ) + """ }) { + + var plan = this.physicalPlan(query, airportsWeb); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushCartesianSpatialIntersectsShapeToSource() { + for (String query : new String[] { """ + FROM countriesBboxWeb + | WHERE ST_INTERSECTS( + shape, + TO_CARTESIANSHAPE( + "POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))" + ) + ) + """, """ + FROM countriesBboxWeb + | WHERE ST_INTERSECTS( + TO_CARTESIANSHAPE( + "POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))" + ), + shape + ) + """ }) { + + var plan = this.physicalPlan(query, countriesBboxWeb); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var 
project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("shape")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } } public void testEnrichBeforeAggregation() { @@ -3287,6 +3879,42 @@ private static AggregateFunction assertAggregation(PhysicalPlan plan, String ali return aggFunc; } + private static String findSingleAggregation(PhysicalPlan plan, String... aliasNames) { + var agg = as(plan, AggregateExec.class); + var aggExps = agg.aggregates().stream().filter(a -> { + var alias = as(a, Alias.class); + return Arrays.stream(aliasNames).anyMatch(name -> name.equals(alias.name())); + }).toList(); + if (aggExps.size() != 1) { + throw new AssertionError( + "Expected single aggregation from " + Arrays.toString(aliasNames) + " but found " + aggExps.size() + " aggregations" + ); + } + var aggExp = aggExps.get(0); + var alias = as(aggExp, Alias.class); + return alias.name(); + } + + private static QueryBuilder findQueryBuilder(BoolQueryBuilder booleanQuery, String fieldName) { + return booleanQuery.must() + .stream() + .filter(b -> ((SpatialRelatesQuery.ShapeQueryBuilder) b).fieldName().equals(fieldName)) + .findFirst() + .get(); + } + + private void assertFieldExtractionWithDocValues(FieldExtractExec extract, DataType dataType, String... 
fieldNames) { + extract.attributesToExtract().forEach(attr -> { + String name = attr.qualifiedName(); + if (asList(fieldNames).contains(name)) { + assertThat("Expected field '" + name + "' to use doc-values", extract.hasDocValuesAttribute(attr), equalTo(true)); + assertThat("Expected field '" + name + "' to have data type " + dataType, attr.dataType(), equalTo(dataType)); + } else { + assertThat("Expected field '" + name + "' to NOT use doc-values", extract.hasDocValuesAttribute(attr), equalTo(false)); + } + }); + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -3344,16 +3972,11 @@ static PhysicalPlan localRelationshipAlignment(PhysicalPlan l) { } private PhysicalPlan physicalPlan(String query) { - var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); - // System.out.println("Logical\n" + logical); - var physical = mapper.map(logical); - // System.out.println(physical); - assertSerialization(physical); - return physical; + return physicalPlan(query, testData); } - private PhysicalPlan physicalPlanAirports(String query) { - var logical = logicalOptimizer.optimize(analyzerAirports.analyze(parser.createStatement(query))); + private PhysicalPlan physicalPlan(String query, TestDataSource dataSource) { + var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); // System.out.println(physical); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index b1f8a4ed2f07d..9e215e45fbde2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -14,8 +14,8 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -583,7 +583,8 @@ public void testDeprecatedIsNullFunction() { public void testMetadataFieldOnOtherSources() { expectError("row a = 1 metadata _index", "line 1:20: extraneous input '_index' expecting "); - expectError("show functions metadata _index", "line 1:16: token recognition error at: 'm'"); + expectError("meta functions metadata _index", "line 1:16: token recognition error at: 'm'"); + expectError("show info metadata _index", "line 1:11: token recognition error at: 'm'"); expectError( "explain [from foo] metadata _index", "line 1:20: mismatched input 'metadata' expecting {'|', ',', OPENING_BRACKET, ']', 'metadata'}" diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 2f201196fbe83..043d75ea1cbca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -277,7 +277,7 @@ public Operator get(DriverContext driverContext) { aggregators, () -> BlockHash.build( List.of(new HashAggregationOperator.GroupSpec(groupByChannel, groupElementType)), - driverContext, + driverContext.blockFactory(), pageSize, false ), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 6dc15d67e0560..ab004a3a055ce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -24,6 +24,7 @@ import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.META; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; @@ -54,6 +55,7 @@ public void testDissectQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testEvalQuery() { @@ -73,6 +75,7 @@ public void testEvalQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testGrokQuery() { @@ -92,6 +95,7 @@ public void testGrokQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testLimitQuery() { @@ -111,6 +115,7 @@ public void testLimitQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testSortQuery() { @@ -130,6 +135,7 @@ public void testSortQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testStatsQuery() { @@ -149,6 +155,7 @@ public void testStatsQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testWhereQuery() { @@ -168,6 +175,7 @@ public void testWhereQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testTwoWhereQuery() { @@ -187,6 +195,7 @@ public void testTwoWhereQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testTwoQueriesExecuted() { @@ -226,6 +235,7 @@ public void testTwoQueriesExecuted() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testEnrich() { @@ -251,6 +261,7 @@ public void testEnrich() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, 
rename(c)); + assertEquals(0, meta(c)); } public void testMvExpand() { @@ -279,11 +290,31 @@ public void testMvExpand() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } - public void testShowFunctionsOrInfo() { - String showCommand = randomFrom("show functions", "show info"); - Counters c = esql(showCommand + " | stats a = count(*), b = count(*), c = count(*) | mv_expand c"); + public void testMetaFunctions() { + Counters c = esql("meta functions | stats a = count(*) | mv_expand a"); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(0, sort(c)); + assertEquals(1L, stats(c)); + assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(1L, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(0, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); + assertEquals(1L, meta(c)); + } + + public void testShowInfo() { + Counters c = esql("show info | stats a = count(*), b = count(*), c = count(*) | mv_expand c"); assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); @@ -299,6 +330,7 @@ public void testShowFunctionsOrInfo() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testRow() { @@ -318,6 +350,7 @@ public void testRow() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testDropAndRename() { @@ -337,6 +370,7 @@ public void testDropAndRename() { assertEquals(1L, drop(c)); assertEquals(0, keep(c)); assertEquals(1L, rename(c)); + assertEquals(0, meta(c)); } public void testKeep() { @@ -361,6 +395,7 @@ public void testKeep() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } private long dissect(Counters c) { @@ -423,6 +458,10 @@ private long rename(Counters c) { return c.get(FPREFIX + RENAME); } + private long meta(Counters c) { + return c.get(FPREFIX + META); + } + private Counters esql(String esql) { return esql(esql, null); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java new file mode 100644 index 0000000000000..cc8096831f598 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class InferencePermissionsIT extends ESRestTestCase { + + private static final String PASSWORD = "secret-test-password"; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("inference-service-test") + .user("x_pack_rest_user", "x-pack-test-password") + .user("test_inference_admin", PASSWORD, "inference_admin", false) + .user("test_inference_user", PASSWORD, "inference_user", false) + .user("test_no_privileged", PASSWORD, "", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + // use the privileged users here but not in the tests + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + public void testPermissions() throws IOException { + var putRequest = new Request("PUT", "_inference/sparse_embedding/permissions_test"); + putRequest.setJsonEntity(InferenceBaseRestTest.mockSparseServiceModelConfig()); + var getAllRequest = new Request("GET", "_inference/sparse_embedding/_all"); + var deleteRequest = new Request("DELETE", "_inference/sparse_embedding/permissions_test"); + + var putModelForTestingInference = new Request("PUT", "_inference/sparse_embedding/model_to_test_user_priv"); + putModelForTestingInference.setJsonEntity(InferenceBaseRestTest.mockSparseServiceModelConfig()); + + var inferRequest = new Request("POST", "_inference/sparse_embedding/model_to_test_user_priv"); + var bodyBuilder = new StringBuilder("{\"input\": ["); + for (var in : new String[] { "foo", "bar" }) { + bodyBuilder.append('"').append(in).append('"').append(','); + } + // remove last comma + bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); + bodyBuilder.append("]}"); + inferRequest.setJsonEntity(bodyBuilder.toString()); + + var deleteInferenceModel = new Request("DELETE", "_inference/sparse_embedding/model_to_test_user_priv"); + + try (RestClient inferenceAdminClient = buildClient(inferenceAdminClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(inferenceAdminClient, putRequest, true); + makeRequest(inferenceAdminClient, getAllRequest, true); + makeRequest(inferenceAdminClient, deleteRequest, true); + // create a model now as the other clients don't have the privilege to do so + makeRequest(inferenceAdminClient, putModelForTestingInference, true); + makeRequest(inferenceAdminClient, inferRequest, true); + } + + try (RestClient inferenceUserClient = 
buildClient(inferenceUserClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(inferenceUserClient, putRequest, false); + makeRequest(inferenceUserClient, getAllRequest, true); + makeRequest(inferenceUserClient, inferRequest, true); + makeRequest(inferenceUserClient, deleteInferenceModel, false); + } + + try (RestClient unprivilegedClient = buildClient(unprivilegedUserClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(unprivilegedClient, putRequest, false); + makeRequest(unprivilegedClient, getAllRequest, false); + makeRequest(unprivilegedClient, inferRequest, false); + makeRequest(unprivilegedClient, deleteInferenceModel, false); + } + } + + private Settings inferenceAdminClientSettings() { + String token = basicAuthHeaderValue("test_inference_admin", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private Settings inferenceUserClientSettings() { + String token = basicAuthHeaderValue("test_inference_user", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private Settings unprivilegedUserClientSettings() { + String token = basicAuthHeaderValue("test_no_privileged", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + /* + * This makes the given request with the given client. It asserts a 200 response if expectSuccess is true, and asserts an exception + * with a 403 response if expectStatus is false. + */ + private void makeRequest(RestClient client, Request request, boolean expectSuccess) throws IOException { + if (expectSuccess) { + Response response = client.performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + } else { + ResponseException exception = expectThrows(ResponseException.class, () -> client.performRequest(request)); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.FORBIDDEN.getStatus())); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index c38b427200744..80fd98c40516e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; @@ -62,6 +63,13 @@ public static List getNamedWriteables() { ); // Chunked inference results + namedWriteables.add( + new NamedWriteableRegistry.Entry( + InferenceServiceResults.class, + ErrorChunkedInferenceResults.NAME, + ErrorChunkedInferenceResults::new + ) + ); namedWriteables.add( new 
NamedWriteableRegistry.Entry( InferenceServiceResults.class, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java new file mode 100644 index 0000000000000..ac28aa87f554b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.Strings; + +import java.time.Clock; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Implements a throttler using the token bucket algorithm. + * + * The general approach is to define the rate limiter with size (accumulated tokens limit) which dictates how many + * unused tokens can be saved up, and a rate at which the tokens are created. Then when a thread should be rate limited + * it can attempt to acquire a certain number of tokens (typically one for each item of work it's going to do). If unused tokens + * are available in the bucket already, those will be used. If the number of available tokens covers the desired amount + * the thread will not sleep. If the bucket does not contain enough tokens, it will calculate how long the thread needs to sleep + * to accumulate the requested amount of tokens. + * + * By setting the accumulated tokens limit to a value greater than zero, it effectively allows bursts of traffic. If the accumulated + * tokens limit is set to zero, it will force the acquiring thread to wait on each call. 
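+ *
+ * A minimal usage sketch (the limit and rate below are arbitrary example values, not defaults of this class):
+ * <pre>{@code
+ * // allow up to 10 unused tokens to accumulate, producing 500 new tokens per second
+ * RateLimiter limiter = new RateLimiter(10, 500, TimeUnit.SECONDS);
+ * limiter.acquire(1); // sleeps roughly 2ms per missing token at this rate; may throw InterruptedException
+ * }</pre>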
+ */ +public class RateLimiter { + + private double tokensPerMicros; + private double accumulatedTokensLimit; + private double accumulatedTokens; + private Instant nextTokenAvailability; + private final Sleeper sleeper; + private final Clock clock; + + /** + * @param accumulatedTokensLimit the limit for tokens stashed in the bucket + * @param tokensPerTimeUnit the number of tokens to produce per the time unit passed in + * @param unit the time unit frequency for generating tokens + */ + public RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit) { + this(accumulatedTokensLimit, tokensPerTimeUnit, unit, new TimeUnitSleeper(), Clock.systemUTC()); + } + + // default for testing + RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit, Sleeper sleeper, Clock clock) { + this.sleeper = Objects.requireNonNull(sleeper); + this.clock = Objects.requireNonNull(clock); + nextTokenAvailability = Instant.MIN; + setRate(accumulatedTokensLimit, tokensPerTimeUnit, unit); + } + + public final synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) { + Objects.requireNonNull(newUnit); + + if (newAccumulatedTokensLimit < 0) { + throw new IllegalArgumentException("Accumulated tokens limit must be greater than or equal to 0"); + } + + if (Double.isInfinite(newAccumulatedTokensLimit)) { + throw new IllegalArgumentException( + Strings.format("Accumulated tokens limit must be less than or equal to %s", Double.MAX_VALUE) + ); + } + + if (newTokensPerTimeUnit <= 0) { + throw new IllegalArgumentException("Tokens per time unit must be greater than 0"); + } + + if (newTokensPerTimeUnit == Double.POSITIVE_INFINITY) { + throw new IllegalArgumentException(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE)); + } + + accumulatedTokens = Math.min(accumulatedTokens, newAccumulatedTokensLimit); + + accumulatedTokensLimit = newAccumulatedTokensLimit; + + var unitsInMicros = newUnit.toMicros(1); + tokensPerMicros = newTokensPerTimeUnit / unitsInMicros; + assert Double.isInfinite(tokensPerMicros) == false : "Tokens per microsecond should not be infinity"; + + accumulateTokens(); + } + + /** + * Causes the thread to wait until the tokens are available + * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here + * @throws InterruptedException _ + */ + public void acquire(int tokens) throws InterruptedException { + if (tokens <= 0) { + throw new IllegalArgumentException("Requested tokens must be positive"); + } + + double microsToWait; + synchronized (this) { + accumulateTokens(); + var accumulatedTokensToUse = Math.min(tokens, accumulatedTokens); + var additionalTokensRequired = tokens - accumulatedTokensToUse; + microsToWait = additionalTokensRequired / tokensPerMicros; + accumulatedTokens -= accumulatedTokensToUse; + nextTokenAvailability = nextTokenAvailability.plus((long) microsToWait, ChronoUnit.MICROS); + } + + sleeper.sleep((long) microsToWait); + } + + private void accumulateTokens() { + var now = Instant.now(clock); + if (now.isAfter(nextTokenAvailability)) { + var elapsedTimeMicros = microsBetweenExact(nextTokenAvailability, now); + var newTokens = tokensPerMicros * elapsedTimeMicros; + accumulatedTokens = Math.min(accumulatedTokensLimit, accumulatedTokens + newTokens); + nextTokenAvailability = now; + } + } + + private static long microsBetweenExact(Instant start, Instant end) { + try { + return 
ChronoUnit.MICROS.between(start, end); + } catch (ArithmeticException e) { + if (end.isAfter(start)) { + return Long.MAX_VALUE; + } + + return 0; + } + } + + // default for testing + Instant getNextTokenAvailability() { + return nextTokenAvailability; + } + + public interface Sleeper { + void sleep(long microsecondsToSleep) throws InterruptedException; + } + + static final class TimeUnitSleeper implements Sleeper { + public void sleep(long microsecondsToSleep) throws InterruptedException { + TimeUnit.MICROSECONDS.sleep(microsecondsToSleep); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index 1f50f0ae6bc57..a49fc85200894 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -31,7 +31,7 @@ public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model) { Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( - model.getServiceSettings().getCommonSettings().getUri(), + model.getServiceSettings().getCommonSettings().uri(), "Cohere embeddings" ); requestCreator = new CohereEmbeddingsExecutableRequestCreator(model); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java index 94583c634fb26..dc89240862e6a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.Map; @@ -33,4 +34,11 @@ public ExecutableAction create(OpenAiEmbeddingsModel model, Map return new OpenAiEmbeddingsAction(sender, overriddenModel, serviceComponents); } + + @Override + public ExecutableAction create(OpenAiChatCompletionModel model, Map taskSettings) { + var overriddenModel = OpenAiChatCompletionModel.of(model, taskSettings); + + return new OpenAiChatCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java index 52d9f2e2132a7..0f26e054d734b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java @@ -8,10 
+8,13 @@ package org.elasticsearch.xpack.inference.external.action.openai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.Map; public interface OpenAiActionVisitor { ExecutableAction create(OpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(OpenAiChatCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java new file mode 100644 index 0000000000000..31fd6a35ef26b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.openai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class OpenAiChatCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final OpenAiCompletionExecutableRequestCreator requestCreator; + + private final Sender sender; + + public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new OpenAiCompletionExecutableRequestCreator(model); + this.errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), "OpenAI chat completions"); + } + + @Override + public void execute(List input, ActionListener listener) { + if (input.size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, input, wrappedListener); + } catch (ElasticsearchException e) { + 
listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java index b0fdc800a64da..ff4f9847da8a1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java @@ -37,7 +37,7 @@ private static ResponseHandler createEmbeddingsHandler() { public CohereEmbeddingsExecutableRequestCreator(CohereEmbeddingsModel model) { this.model = Objects.requireNonNull(model); - account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().getUri(), this.model.getSecretSettings().apiKey()); + account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java new file mode 100644 index 0000000000000..44ab670843335 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.external.openai.OpenAiChatCompletionResponseHandler; +import org.elasticsearch.xpack.inference.external.request.openai.OpenAiChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.openai.OpenAiChatCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class OpenAiCompletionExecutableRequestCreator implements ExecutableRequestCreator { + + private static final Logger logger = LogManager.getLogger(OpenAiCompletionExecutableRequestCreator.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final OpenAiChatCompletionModel model; + + private final OpenAiAccount account; + + public OpenAiCompletionExecutableRequestCreator(OpenAiChatCompletionModel model) { + this.model = Objects.requireNonNull(model); + this.account = new OpenAiAccount( + this.model.getServiceSettings().uri(), + this.model.getServiceSettings().organizationId(), + this.model.getSecretSettings().apiKey() + ); + } + + @Override + public Runnable create( + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + OpenAiChatCompletionRequest request = new OpenAiChatCompletionRequest(account, input, model); + + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + + private static ResponseHandler createCompletionHandler() { + return new OpenAiChatCompletionResponseHandler("openai completion", OpenAiChatCompletionResponseEntity::fromResponse); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java new file mode 100644 index 0000000000000..5924356e610a3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; + +public class OpenAiChatCompletionResponseHandler extends OpenAiResponseHandler { + public OpenAiChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { + super(requestType, parseFunction); + } + + @Override + RetryException buildExceptionHandling429(Request request, HttpResult result) { + // We don't retry, if the chat completion input is too large + return new RetryException(false, buildError(RATE_LIMIT, request, result)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 10083d3fd4667..db7ca8d6bdc63 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -35,6 +35,7 @@ public class OpenAiResponseHandler extends BaseResponseHandler { static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; static final String CONTENT_TOO_LARGE_MESSAGE = "Please reduce your prompt; or completion length."; + static final String OPENAI_SERVER_BUSY = "Received a server busy error status code"; public OpenAiResponseHandler(String requestType, ResponseParser parseFunction) { @@ -70,7 +71,7 @@ void checkForFailureStatusCode(Request request, HttpResult result) throws RetryE } else if (statusCode > 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if (statusCode == 429) { - throw new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); + throw buildExceptionHandling429(request, result); } else if (isContentTooLarge(result)) { throw new ContentTooLargeException(buildError(CONTENT_TOO_LARGE, request, result)); } else if (statusCode == 401) { @@ -82,6 +83,10 @@ void checkForFailureStatusCode(Request request, HttpResult result) throws RetryE } } + RetryException buildExceptionHandling429(Request request, HttpResult result) { + return new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); + } + private static boolean isContentTooLarge(HttpResult result) { int statusCode = result.response().getStatusLine().getStatusCode(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java index d788a4667c532..45f25a4dd35f5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -46,7 +46,7 @@ public CohereEmbeddingsRequest(CohereAccount account, List input, Cohere this.input = Objects.requireNonNull(input); uri = buildUri(this.account.url(), "Cohere", CohereEmbeddingsRequest::buildDefaultUri); taskSettings = 
embeddingsModel.getTaskSettings(); - model = embeddingsModel.getServiceSettings().getCommonSettings().getModelId(); + model = embeddingsModel.getServiceSettings().getCommonSettings().modelId(); embeddingType = embeddingsModel.getServiceSettings().getEmbeddingType(); inferenceEntityId = embeddingsModel.getInferenceEntityId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 45c7372a4dc22..035bd44ebf405 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -57,7 +57,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } if (embeddingType != null) { - builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType)); + builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType.toRequestString())); } if (taskSettings.getTruncation() != null) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java new file mode 100644 index 0000000000000..e53d4e7362735 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; + +public class OpenAiChatCompletionRequest implements OpenAiRequest { + + private final OpenAiAccount account; + private final List input; + private final URI uri; + private final OpenAiChatCompletionModel model; + + public OpenAiChatCompletionRequest(OpenAiAccount account, List input, OpenAiChatCompletionModel model) { + this.account = Objects.requireNonNull(account); + this.input = Objects.requireNonNull(input); + this.uri = buildUri(this.account.url(), "OpenAI", OpenAiChatCompletionRequest::buildDefaultUri); + this.model = Objects.requireNonNull(model); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new OpenAiChatCompletionRequestEntity(input, model.getServiceSettings().modelId(), model.getTaskSettings().user()) + ).getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); + + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + + var org = account.organizationId(); + if (org != null) { + httpPost.setHeader(createOrgHeader(org)); + } + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + // No truncation for OpenAI chat completions + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for OpenAI chat completions + return null; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + // default for testing + static URI buildDefaultUri() throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(OpenAiUtils.HOST) + .setPathSegments(OpenAiUtils.VERSION_1, OpenAiUtils.CHAT_PATH, OpenAiUtils.COMPLETIONS_PATH) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java new file mode 100644 index 0000000000000..c9aa225c77941 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java @@ -0,0 +1,72 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class OpenAiChatCompletionRequestEntity implements ToXContentObject { + + private static final String MESSAGES_FIELD = "messages"; + private static final String MODEL_FIELD = "model"; + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String ROLE_FIELD = "role"; + private static final String USER_FIELD = "user"; + private static final String CONTENT_FIELD = "content"; + + private final List messages; + private final String model; + + private final String user; + + public OpenAiChatCompletionRequestEntity(List messages, String model, String user) { + Objects.requireNonNull(messages); + Objects.requireNonNull(model); + + this.messages = messages; + this.model = model; + this.user = user; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + builder.endArray(); + + builder.field(MODEL_FIELD, model); + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index 9893b556e1a47..df5d3024fd483 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -28,7 +28,7 @@ import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; -public class OpenAiEmbeddingsRequest implements Request { +public class OpenAiEmbeddingsRequest implements OpenAiRequest { private final Truncator truncator; private final OpenAiAccount account; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java new file mode 100644 index 0000000000000..7a630108cfcdf --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.xpack.inference.external.request.Request; + +public interface OpenAiRequest extends Request {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java index a6479b3ecde25..1199f8a4f0230 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java @@ -14,6 +14,10 @@ public class OpenAiUtils { public static final String HOST = "api.openai.com"; public static final String VERSION_1 = "v1"; public static final String EMBEDDINGS_PATH = "embeddings"; + + public static final String CHAT_PATH = "chat"; + + public static final String COMPLETIONS_PATH = "completions"; public static final String ORGANIZATION_HEADER = "OpenAI-Organization"; public static Header createOrgHeader(String org) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 4f4091873fba9..42fd0ddc812ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -15,6 +15,13 @@ public class XContentUtils { + /** + * Moves to the first valid token, which is non-null. + * Does not move, if the parser is already positioned at a valid token. + * + * @param parser parser to move + * @throws IOException if underlying parser methods throw + */ public static void moveToFirstToken(XContentParser parser) throws IOException { if (parser.currentToken() == null) { parser.nextToken(); @@ -30,17 +37,42 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { * @throws IllegalStateException if the field cannot be found */ public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { - XContentParser.Token token; + XContentParser.Token token = parser.nextToken(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (token != null && token != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; } + token = parser.nextToken(); } throw new IllegalStateException(format(errorMsgTemplate, field)); } + /** + * Progress the parser consuming and discarding tokens until the + * parser points to the end of the current object. Nested objects + * and arrays are skipped. + * + * If successful the parser's current token is the end object token. + * + * @param parser + * @throws IOException + */ + public static void consumeUntilObjectEnd(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + + // token == null when correctly formed input has + // been fully parsed. 
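    + // Illustrative trace: with remaining input `, "a": 1, "b": [2, 3] }` the loop below steps over the
    + // field names and the scalar value, skips the nested array via skipChildren(), and exits with the
    + // parser positioned on the END_OBJECT token.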
+ while (token != null && token != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + + token = parser.nextToken(); + } + } + private XContentUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index bd808c225d7e3..9221e5c5deed8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -159,18 +159,17 @@ public static InferenceServiceResults fromResponse(Request request, HttpResult r } private static InferenceServiceResults parseEmbeddingsObject(XContentParser parser) throws IOException { - XContentParser.Token token; + XContentParser.Token token = parser.nextToken(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (token != null && token != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { var embeddingValueParser = EMBEDDING_PARSERS.get(parser.currentName()); - if (embeddingValueParser == null) { - continue; + if (embeddingValueParser != null) { + parser.nextToken(); + return embeddingValueParser.apply(parser); } - - parser.nextToken(); - return embeddingValueParser.apply(parser); } + token = parser.nextToken(); } throw new IllegalStateException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 247537b9958d0..7b7d6c0d06b2b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -83,13 +83,15 @@ private static SparseEmbeddingResults.Embedding parseExpansionResult(boolean[] t XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); List weightedTokens = new ArrayList<>(); - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + token = parser.nextToken(); + while (token != null && token != XContentParser.Token.END_OBJECT) { XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); var floatToken = parser.nextToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser); weightedTokens.add(new SparseEmbeddingResults.WeightedToken(parser.currentName(), parser.floatValue())); + + token = parser.nextToken(); } // prevent an out of bounds if for some reason the truncation list is smaller than the results diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java new file mode 100644 index 0000000000000..daf4e6578240e --- 
/dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.openai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class OpenAiChatCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in OpenAI chat completions response"; + + /** + * Parses the OpenAI chat completion response. + * For a request like: + * + *
    +     *     <code>
    +     *         {
    +     *             "inputs": ["Please summarize this text: some text", "Answer the following question: Question"]
    +     *         }
    +     *     </code>
    +     * 
    + * + * The response would look like: + * + *
    +     *     <code>
    +     *         {
    +     *              "id": "chatcmpl-123",
    +     *              "object": "chat.completion",
    +     *              "created": 1677652288,
    +     *              "model": "gpt-3.5-turbo-0613",
    +     *              "system_fingerprint": "fp_44709d6fcb",
    +     *              "choices": [
    +     *                  {
    +     *                      "index": 0,
    +     *                      "message": {
    +     *                          "role": "assistant",
    +     *                          "content": "\n\nHello there, how may I assist you today?"
    +     *                      },
    +     *                      "logprobs": null,
    +     *                      "finish_reason": "stop"
    +     *                  }
    +     *              ],
    +     *              "usage": {
    +     *                "prompt_tokens": 9,
    +     *                "completion_tokens": 12,
    +     *                "total_tokens": 21
    +     *              }
    +     *          }
    +     *     </code>
    +     * 
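    +     *
    +     * Given the response above, the parsing below positions the parser after the "choices" field, steps into
    +     * the first choice object, then positions it after "message" and finally "content", and returns that
    +     * content string wrapped in a {@code ChatCompletionResults} holding a single result.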
    + */ + + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index 4926ba3f0ef6b..4bfdec9a3669b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -95,11 +96,8 @@ private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParse positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingValues = XContentParserUtils.parseList(parser, OpenAiEmbeddingsResponseEntity::parseEmbeddingList); - - // the parser is currently sitting at an ARRAY_END so go to the next token - parser.nextToken(); - // if there are additional fields within this object, lets skip them, so we can begin parsing the next embedding array - parser.skipChildren(); + // parse and discard the rest of the object + consumeUntilObjectEnd(parser); return new TextEmbeddingResults.Embedding(embeddingValues); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index cfbb07cb940e7..96846f3f71142 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -182,17 +182,14 @@ public static SecureString extractRequiredSecureString( 
} public static SimilarityMeasure extractSimilarity(Map map, String scope, ValidationException validationException) { - String similarity = extractOptionalString(map, SIMILARITY, scope, validationException); - - if (similarity != null) { - try { - return SimilarityMeasure.fromString(similarity); - } catch (IllegalArgumentException iae) { - validationException.addValidationError("[" + scope + "] Unknown similarity measure [" + similarity + "]"); - } - } - - return null; + return extractOptionalEnum( + map, + SIMILARITY, + scope, + SimilarityMeasure::fromString, + EnumSet.allOf(SimilarityMeasure.class), + validationException + ); } public static String extractRequiredString( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 2bd88f6f01eb4..e25dabd7e5520 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -216,13 +216,16 @@ public void checkModelConfig(Model model, ActionListener listener) { } private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsModel model, int embeddingSize) { + var similarityFromModel = model.getServiceSettings().similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + CohereEmbeddingsServiceSettings serviceSettings = new CohereEmbeddingsServiceSettings( new CohereServiceSettings( - model.getServiceSettings().getCommonSettings().getUri(), - SimilarityMeasure.DOT_PRODUCT, + model.getServiceSettings().getCommonSettings().uri(), + similarityToUse, embeddingSize, - model.getServiceSettings().getCommonSettings().getMaxInputTokens(), - model.getServiceSettings().getCommonSettings().getModelId() + model.getServiceSettings().getCommonSettings().maxInputTokens(), + model.getServiceSettings().getCommonSettings().modelId() ), model.getServiceSettings().getEmbeddingType() ); @@ -232,6 +235,6 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 8ea271cdb64a5..7fc9ce08e9857 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -65,10 +65,10 @@ public static CohereServiceSettings fromMap(Map map, Configurati throw validationException; } - return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, getModelId(oldModelId, modelId)); + return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, modelId(oldModelId, modelId)); } - private static String getModelId(@Nullable String model, @Nullable String modelId) { + private static String modelId(@Nullable String model, @Nullable String modelId) { return modelId != null ? 
modelId : model; } @@ -110,23 +110,25 @@ public CohereServiceSettings(StreamInput in) throws IOException { modelId = in.readOptionalString(); } - public URI getUri() { + public URI uri() { return uri; } - public SimilarityMeasure getSimilarity() { + @Override + public SimilarityMeasure similarity() { return similarity; } - public Integer getDimensions() { + @Override + public Integer dimensions() { return dimensions; } - public Integer getMaxInputTokens() { + public Integer maxInputTokens() { return maxInputTokens; } - public String getModelId() { + public String modelId() { return modelId; } @@ -179,7 +181,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? uri.toString() : null; out.writeOptionalString(uriToWrite); - out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); out.writeOptionalString(modelId); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java index 82d57cfb92381..8dbbbf7011e86 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java @@ -7,7 +7,15 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Arrays; +import java.util.EnumSet; import java.util.Locale; +import java.util.Map; /** * Defines the type of embedding that the cohere api should return for a request. @@ -20,17 +28,48 @@ public enum CohereEmbeddingType { /** * Use this when you want to get back the default float embeddings. Valid for all models. */ - FLOAT, + FLOAT(DenseVectorFieldMapper.ElementType.FLOAT, RequestConstants.FLOAT), /** * Use this when you want to get back signed int8 embeddings. Valid for only v3 models. 
*/ - INT8; + INT8(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8), + /** + * This is a synonym for INT8 + */ + BYTE(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8); + + private static final class RequestConstants { + private static final String FLOAT = "float"; + private static final String INT8 = "int8"; + } + + private static final Map ELEMENT_TYPE_TO_COHERE_EMBEDDING = Map.of( + DenseVectorFieldMapper.ElementType.FLOAT, + FLOAT, + DenseVectorFieldMapper.ElementType.BYTE, + BYTE + ); + static final EnumSet SUPPORTED_ELEMENT_TYPES = EnumSet.copyOf( + ELEMENT_TYPE_TO_COHERE_EMBEDDING.keySet() + ); + + private final DenseVectorFieldMapper.ElementType elementType; + private final String requestString; + + CohereEmbeddingType(DenseVectorFieldMapper.ElementType elementType, String requestString) { + this.elementType = elementType; + this.requestString = requestString; + } @Override public String toString() { return name().toLowerCase(Locale.ROOT); } + public String toRequestString() { + return requestString; + } + public static String toLowerCase(CohereEmbeddingType type) { return type.toString().toLowerCase(Locale.ROOT); } @@ -38,4 +77,45 @@ public static String toLowerCase(CohereEmbeddingType type) { public static CohereEmbeddingType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } + + public static CohereEmbeddingType fromElementType(DenseVectorFieldMapper.ElementType elementType) { + var embedding = ELEMENT_TYPE_TO_COHERE_EMBEDDING.get(elementType); + + if (embedding == null) { + var validElementTypes = SUPPORTED_ELEMENT_TYPES.stream() + .map(value -> value.toString().toLowerCase(Locale.ROOT)) + .toArray(String[]::new); + Arrays.sort(validElementTypes); + + throw new IllegalArgumentException( + Strings.format( + "Element type [%s] does not map to a Cohere embedding value, must be one of [%s]", + elementType, + String.join(", ", validElementTypes) + ) + ); + } + + return embedding; + } + + public DenseVectorFieldMapper.ElementType toElementType() { + return elementType; + } + + /** + * Returns an embedding type that is known based on the transport version provided. If the embedding type enum was not yet + * introduced it will be defaulted INT8. 
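    + * For example, {@code translateToVersion(BYTE, version)} returns {@code INT8} when {@code version} is before
    + * {@code ML_INFERENCE_EMBEDDING_BYTE_ADDED}, and returns the value unchanged in every other case.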
+ * + * @param embeddingType the value to translate if necessary + * @param version the version that dictates the translation + * @return the embedding type that is known to the version passed in + */ + public static CohereEmbeddingType translateToVersion(CohereEmbeddingType embeddingType, TransportVersion version) { + if (version.before(TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED) && embeddingType == BYTE) { + return INT8; + } + + return embeddingType; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index a8ae8aa8d7fdd..ec639c3fac61c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -12,9 +12,10 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -22,32 +23,21 @@ import java.io.IOException; import java.util.EnumSet; -import java.util.Locale; import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; public class CohereEmbeddingsServiceSettings implements ServiceSettings { public static final String NAME = "cohere_embeddings_service_settings"; static final String EMBEDDING_TYPE = "embedding_type"; - static final String EMBEDDING_TYPE_BYTE = "byte"; public static CohereEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); var commonServiceSettings = CohereServiceSettings.fromMap(map, context); - translateEmbeddingType(map, context); - - CohereEmbeddingType embeddingTypes = extractOptionalEnum( - map, - EMBEDDING_TYPE, - ModelConfigurations.SERVICE_SETTINGS, - CohereEmbeddingType::fromString, - EnumSet.allOf(CohereEmbeddingType.class), - validationException - ); + + CohereEmbeddingType embeddingTypes = parseEmbeddingType(map, context, validationException); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -56,47 +46,76 @@ public static CohereEmbeddingsServiceSettings fromMap(Map map, C return new CohereEmbeddingsServiceSettings(commonServiceSettings, embeddingTypes); } - private static void translateEmbeddingType(Map map, ConfigurationParseContext context) { - if (ConfigurationParseContext.isRequestContext(context) == false || map.containsKey(EMBEDDING_TYPE) == false) { - return; + private static CohereEmbeddingType parseEmbeddingType( + Map map, + ConfigurationParseContext context, + ValidationException validationException + ) { + if 
(context == ConfigurationParseContext.REQUEST) { + return Objects.requireNonNullElse( + extractOptionalEnum( + map, + EMBEDDING_TYPE, + ModelConfigurations.SERVICE_SETTINGS, + CohereEmbeddingType::fromString, + EnumSet.allOf(CohereEmbeddingType.class), + validationException + ), + CohereEmbeddingType.FLOAT + ); } - ValidationException validationException = new ValidationException(); - - String embeddingType = extractRequiredString(map, EMBEDDING_TYPE, ModelConfigurations.SERVICE_SETTINGS, validationException); - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } + DenseVectorFieldMapper.ElementType elementType = Objects.requireNonNullElse( + extractOptionalEnum( + map, + EMBEDDING_TYPE, + ModelConfigurations.SERVICE_SETTINGS, + DenseVectorFieldMapper.ElementType::fromString, + CohereEmbeddingType.SUPPORTED_ELEMENT_TYPES, + validationException + ), + DenseVectorFieldMapper.ElementType.FLOAT + ); - assert embeddingType != null; - if (embeddingType.toLowerCase(Locale.ROOT).equals(EMBEDDING_TYPE_BYTE)) { - map.put(EMBEDDING_TYPE, CohereEmbeddingType.INT8.toString()); - } else { - map.put(EMBEDDING_TYPE, embeddingType); - } + return CohereEmbeddingType.fromElementType(elementType); } private final CohereServiceSettings commonSettings; private final CohereEmbeddingType embeddingType; - public CohereEmbeddingsServiceSettings(CohereServiceSettings commonSettings, @Nullable CohereEmbeddingType embeddingType) { + public CohereEmbeddingsServiceSettings(CohereServiceSettings commonSettings, CohereEmbeddingType embeddingType) { this.commonSettings = commonSettings; - this.embeddingType = embeddingType; + this.embeddingType = Objects.requireNonNull(embeddingType); } public CohereEmbeddingsServiceSettings(StreamInput in) throws IOException { commonSettings = new CohereServiceSettings(in); - embeddingType = in.readOptionalEnum(CohereEmbeddingType.class); + embeddingType = Objects.requireNonNullElse(in.readOptionalEnum(CohereEmbeddingType.class), CohereEmbeddingType.FLOAT); } public CohereServiceSettings getCommonSettings() { return commonSettings; } + @Override + public SimilarityMeasure similarity() { + return commonSettings.similarity(); + } + + @Override + public Integer dimensions() { + return commonSettings.dimensions(); + } + public CohereEmbeddingType getEmbeddingType() { return embeddingType; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return embeddingType == null ? 
DenseVectorFieldMapper.ElementType.FLOAT : embeddingType.toElementType(); + } + @Override public String getWriteableName() { return NAME; @@ -107,7 +126,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); commonSettings.toXContentFragment(builder); - builder.field(EMBEDDING_TYPE, embeddingType); + builder.field(EMBEDDING_TYPE, elementType()); builder.endObject(); return builder; @@ -126,7 +145,7 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { commonSettings.writeTo(out); - out.writeOptionalEnum(embeddingType); + out.writeOptionalEnum(CohereEmbeddingType.translateToVersion(embeddingType, out.getTransportVersion())); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 1aafa340268f3..5e51746253ebe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -28,6 +28,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; @@ -36,6 +37,7 @@ import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; @@ -249,9 +251,9 @@ public void chunkedInfer( return; } - var configUpdate = chunkingOptions.settingsArePresent() + var configUpdate = chunkingOptions != null ? 
new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : TextEmbeddingConfigUpdate.EMPTY_INSTANCE; + : new TokenizationConfigUpdate(null, null); var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), @@ -382,6 +384,8 @@ private List translateChunkedResults(List map) { ValidationException validationException = new ValidationException(); + var requestFields = extractRequestFields(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return createBuilder(requestFields); + } + + private static RequestFields extractRequestFields(Map map, ValidationException validationException) { Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); @@ -61,26 +76,23 @@ public static MultilingualE5SmallInternalServiceSettings.Builder fromMap(Map input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, + @Nullable ChunkingOptions chunkingOptions, ActionListener> listener ) { try { @@ -296,9 +299,9 @@ public void chunkedInfer( return; } - var configUpdate = chunkingOptions.settingsArePresent() + var configUpdate = chunkingOptions != null ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : TextExpansionConfigUpdate.EMPTY_UPDATE; + : new TokenizationConfigUpdate(null, null); var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), @@ -388,6 +391,8 @@ private List translateChunkedResults(List listener) { private static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(HuggingFaceEmbeddingsModel model, int embeddingSize) { var serviceSettings = new HuggingFaceServiceSettings( model.getServiceSettings().uri(), - null, // Similarity measure is unknown + model.getServiceSettings().similarity(), // we don't know the similarity but use whatever the user specified embeddingSize, model.getTokenLimit() ); @@ -76,6 +76,6 @@ public String name() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_12_0; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index f176cf7580567..b151e9c800a74 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; @@ -134,7 +135,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { out.writeString(uri.toString()); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - 
out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); } @@ -144,10 +145,12 @@ public URI uri() { return uri; } + @Override public SimilarityMeasure similarity() { return similarity; } + @Override public Integer dimensions() { return dimensions; } @@ -156,6 +159,11 @@ public Integer maxInputTokens() { return maxInputTokens; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 130928b17ff8d..6ce9b73f6b87c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettings; @@ -127,6 +128,14 @@ private static OpenAiModel createModel( secretSettings, context ); + case COMPLETION -> new OpenAiChatCompletionModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @@ -239,11 +248,14 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo ); } + var similarityFromModel = model.getServiceSettings().similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + OpenAiEmbeddingsServiceSettings serviceSettings = new OpenAiEmbeddingsServiceSettings( model.getServiceSettings().modelId(), model.getServiceSettings().uri(), model.getServiceSettings().organizationId(), - SimilarityMeasure.DOT_PRODUCT, + similarityToUse, embeddingSize, model.getServiceSettings().maxInputTokens(), model.getServiceSettings().dimensionsSetByUser() @@ -254,7 +266,7 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_12_0; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java new file mode 100644 index 0000000000000..bafe1b031b028 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai; + +public class OpenAiServiceFields { + + public static final String USER = "user"; + + public static final String ORGANIZATION = "organization_id"; + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java new file mode 100644 index 0000000000000..467c4f44f34fe --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionVisitor; +import org.elasticsearch.xpack.inference.services.openai.OpenAiModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +public class OpenAiChatCompletionModel extends OpenAiModel { + + public static OpenAiChatCompletionModel of(OpenAiChatCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap(taskSettings); + return new OpenAiChatCompletionModel(model, OpenAiChatCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public OpenAiChatCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + OpenAiChatCompletionServiceSettings.fromMap(serviceSettings), + OpenAiChatCompletionTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + OpenAiChatCompletionModel( + String modelId, + TaskType taskType, + String service, + OpenAiChatCompletionServiceSettings serviceSettings, + OpenAiChatCompletionTaskSettings taskSettings, + @Nullable DefaultSecretSettings secrets + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + } + + private OpenAiChatCompletionModel(OpenAiChatCompletionModel originalModel, OpenAiChatCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public OpenAiChatCompletionServiceSettings getServiceSettings() { + return (OpenAiChatCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public OpenAiChatCompletionTaskSettings getTaskSettings() { + return (OpenAiChatCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + 
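A minimal sketch of how the of(...) factory defined earlier in this class is expected to compose with OpenAiChatCompletionTaskSettings.of(...) from this same diff; "base" is a placeholder for a model built from persisted configuration whose task settings carry user = "stored-user", and nothing outside the shown classes is assumed.

    // Request-level task settings arrive as a mutable map; fromMap(...) consumes the optional "user" key.
    Map<String, Object> requestTaskSettings = new HashMap<>(Map.of("user", "request-user"));

    // of(...) keeps the persisted user only when the request did not supply one,
    // so the returned model reports the overridden value.
    OpenAiChatCompletionModel overridden = OpenAiChatCompletionModel.of(base, requestTaskSettings);
    assert "request-user".equals(overridden.getTaskSettings().user());
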
} + + @Override + public ExecutableAction accept(OpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..8029d8579baba --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; + +/** + * This class handles extracting OpenAI task settings from a request. The difference between this class and + * {@link OpenAiChatCompletionTaskSettings} is that this class considers all fields as optional. It will not throw an error if a field + * is missing. This allows overriding persistent task settings. + * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse + */ +public record OpenAiChatCompletionRequestTaskSettings(@Nullable String user) { + + public static final OpenAiChatCompletionRequestTaskSettings EMPTY_SETTINGS = new OpenAiChatCompletionRequestTaskSettings(null); + + /** + * Extracts the task settings from a map. All settings are considered optional and the absence of a setting + * does not throw an error. + * + * @param map the settings received from a request + * @return a {@link OpenAiChatCompletionRequestTaskSettings} + */ + public static OpenAiChatCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return OpenAiChatCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionRequestTaskSettings(user); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java new file mode 100644 index 0000000000000..16b0ed5d47039 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createOptionalUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; + +/** + * Defines the service settings for interacting with OpenAI's chat completion models. + */ +public class OpenAiChatCompletionServiceSettings implements ServiceSettings { + + public static final String NAME = "openai_completion_service_settings"; + + public static OpenAiChatCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String organizationId = extractOptionalString(map, ORGANIZATION, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionServiceSettings(modelId, uri, organizationId, maxInputTokens); + } + + private final String modelId; + + private final URI uri; + + private final String organizationId; + + private final Integer maxInputTokens; + + public OpenAiChatCompletionServiceSettings( + String modelId, + @Nullable URI uri, + @Nullable String organizationId, + @Nullable Integer maxInputTokens + ) { + this.modelId = modelId; + this.uri = uri; + this.organizationId = organizationId; + this.maxInputTokens = maxInputTokens; + } + + OpenAiChatCompletionServiceSettings( + String modelId, + @Nullable String uri, + @Nullable String organizationId, + @Nullable Integer maxInputTokens + ) { + this(modelId, createOptionalUri(uri), organizationId, maxInputTokens); + } + + public OpenAiChatCompletionServiceSettings(StreamInput in) throws IOException { + this.modelId = in.readString(); + this.uri = 
createOptionalUri(in.readOptionalString()); + this.organizationId = in.readOptionalString(); + this.maxInputTokens = in.readOptionalVInt(); + } + + public String modelId() { + return modelId; + } + + public URI uri() { + return uri; + } + + public String organizationId() { + return organizationId; + } + + public Integer maxInputTokens() { + return maxInputTokens; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + { + builder.field(MODEL_ID, modelId); + + if (uri != null) { + builder.field(URL, uri.toString()); + } + + if (organizationId != null) { + builder.field(ORGANIZATION, organizationId); + } + + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } + } + + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelId); + out.writeOptionalString(uri != null ? uri.toString() : null); + out.writeOptionalString(organizationId); + out.writeOptionalVInt(maxInputTokens); + } + + @Override + public ToXContentObject getFilteredXContentObject() { + return this; + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + OpenAiChatCompletionServiceSettings that = (OpenAiChatCompletionServiceSettings) object; + return Objects.equals(modelId, that.modelId) + && Objects.equals(uri, that.uri) + && Objects.equals(organizationId, that.organizationId) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, uri, organizationId, maxInputTokens); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java new file mode 100644 index 0000000000000..2d5a407f3c1a6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; + +public class OpenAiChatCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "openai_completion_task_settings"; + + public static OpenAiChatCompletionTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionTaskSettings(user); + } + + private final String user; + + public OpenAiChatCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public OpenAiChatCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + public static OpenAiChatCompletionTaskSettings of( + OpenAiChatCompletionTaskSettings originalSettings, + OpenAiChatCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new OpenAiChatCompletionTaskSettings(userToUse); + } + + public String user() { + return user; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (user != null) { + builder.field(USER, user); + } + + builder.endObject(); + + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + OpenAiChatCompletionTaskSettings that = (OpenAiChatCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 5bdb0d7542a83..373704af37fcd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -16,7 +16,7 @@ import java.util.Map; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.USER; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; /** * This class handles extracting OpenAI task settings from a request. 
The difference between this class and diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 34713ff2b7208..1e5c93ea9ae22 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; @@ -37,6 +38,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; /** * Defines the service settings for interacting with OpenAI's text embedding models. @@ -45,7 +47,6 @@ public class OpenAiEmbeddingsServiceSettings implements ServiceSettings { public static final String NAME = "openai_service_settings"; - static final String ORGANIZATION = "organization_id"; static final String DIMENSIONS_SET_BY_USER = "dimensions_set_by_user"; public static OpenAiEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { @@ -191,10 +192,12 @@ public String organizationId() { return organizationId; } + @Override public SimilarityMeasure similarity() { return similarity; } + @Override public Integer dimensions() { return dimensions; } @@ -211,6 +214,11 @@ public String modelId() { return modelId; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } + @Override public String getWriteableName() { return NAME; @@ -271,8 +279,9 @@ public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? 
uri.toString() : null; out.writeOptionalString(uriToWrite); out.writeOptionalString(organizationId); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index a7b11487ca72f..e306f2d3d2928 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -23,6 +23,7 @@ import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; /** * Defines the task settings for the openai service. @@ -33,7 +34,6 @@ public class OpenAiEmbeddingsTaskSettings implements TaskSettings { public static final String NAME = "openai_embeddings_task_settings"; - public static final String USER = "user"; public static OpenAiEmbeddingsTaskSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java new file mode 100644 index 0000000000000..46931f12aaf4f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
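The RateLimiterTests added below pin down a token-bucket style contract; the following is a minimal usage sketch consistent only with what those tests exercise (constructor arguments, acquire(int), and the microsecond sleep amounts) and assumes nothing else about the RateLimiter implementation.

    // With no accumulated-token headroom and a rate of 1 token per minute, the first acquire(1)
    // has to wait a full period; the tests verify that wait as TimeUnit.MINUTES.toMicros(1).
    var limiter = new RateLimiter(
        0,                                  // accumulated tokens limit (no burst allowance)
        1,                                  // tokens per time unit
        TimeUnit.MINUTES,                   // time unit the rate is expressed in
        new RateLimiter.TimeUnitSleeper(),  // real sleeper; the tests below substitute a mock
        Clock.systemUTC()
    );
    limiter.acquire(1);                     // blocks for roughly one minute before returning
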
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; + +import java.time.Clock; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RateLimiterTests extends ESTestCase { + public void testThrows_WhenAccumulatedTokensLimit_IsNegative() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(-1, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Accumulated tokens limit must be greater than or equal to 0")); + } + + public void testThrows_WhenAccumulatedTokensLimit_IsInfinity() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(Double.POSITIVE_INFINITY, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat( + exception.getMessage(), + is(Strings.format("Accumulated tokens limit must be less than or equal to %s", Double.MAX_VALUE)) + ); + } + + public void testThrows_WhenTokensPerTimeUnit_IsZero() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, 0, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Tokens per time unit must be greater than 0")); + } + + public void testThrows_WhenTokensPerTimeUnit_IsInfinity() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, Double.POSITIVE_INFINITY, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE))); + } + + public void testThrows_WhenTokensPerTimeUnit_IsNegative() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, -1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Tokens per time unit must be greater than 0")); + } + + public void testAcquire_Throws_WhenTokens_IsZero() { + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); + var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(0)); + assertThat(exception.getMessage(), is("Requested tokens must be positive")); + } + + public void testAcquire_Throws_WhenTokens_IsNegative() { + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); + var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(-1)); + assertThat(exception.getMessage(), is("Requested tokens must be positive")); + } + + public void testAcquire_First_CallDoesNotSleep() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_DoesNotSleep_WhenTokenRateIsHigh() throws InterruptedException { + var now = 
Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsHigh() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); + limiter.acquire(Integer.MAX_VALUE); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsLow() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + double tokensPerDay = 1; + var limiter = new RateLimiter(0, tokensPerDay, TimeUnit.DAYS, sleeper, clock); + limiter.acquire(Integer.MAX_VALUE); + + double tokensPerMicro = tokensPerDay / TimeUnit.DAYS.toMicros(1); + verify(sleeper, times(1)).sleep((long) ((double) Integer.MAX_VALUE / tokensPerMicro)); + } + + public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(2); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken_NoAccumulated() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SleepsFor10Minute_WhenRequesting10UnavailableToken_NoAccumulated() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(10); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(10)); + } + + public void testAcquire_IncrementsNextTokenAvailabilityInstant_ByOneMinute() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + assertThat(limiter.getNextTokenAvailability(), is(now.plus(1, ChronoUnit.MINUTES))); + } + + public void testAcquire_SecondCallToAcquire_ShouldWait_WhenAccumulatedTokensAreDepleted() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, 
TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SecondCallToAcquire_ShouldWaitForHalfDuration_WhenElapsedTimeIsHalfRequiredDuration() + throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.SECONDS.toMicros(30)); + } + + public void testAcquire_ShouldAccumulateTokens() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(10, 10, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(5); + verify(sleeper, times(1)).sleep(0); + // it should accumulate 5 tokens + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); + limiter.acquire(10); + verify(sleeper, times(2)).sleep(0); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index a844061fa48e1..9b14cf259522c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -27,17 +27,21 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.action.openai.OpenAiChatCompletionActionTests.buildExpectedChatCompletionResultMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; import 
static org.hamcrest.Matchers.equalTo; @@ -283,6 +287,266 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th } } + public void testCreate_OpenAiChatCompletionModel() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap(null); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertNull(request.getHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var settings = buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "data_does_not_exist": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in OpenAI chat completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertNull(webServer.requests().get(0).getHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java new file mode 100644 index 0000000000000..15998469d08d0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.openai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockRequest; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.hamcrest.CoreMatchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class OpenAiChatCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty()); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String 
responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0125", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "result content" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("result content")))); + assertThat(webServer.requests(), hasSize(1)); + + MockRequest request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { + try (var sender = mock(Sender.class)) { + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> createAction("^^", "org", "secret", "model", "user", sender) + ); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled_WhenUrlIsNull() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + 
ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any()); + + var action = createAction(null, "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Failed to send OpenAI chat completions request")); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsExceptionWithNullUrl() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(null, "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Failed to send OpenAI chat completions request")); + } + + public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc", "def"), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), CoreMatchers.is("OpenAI completions only accepts 1 input")); + assertThat(thrownException.status(), CoreMatchers.is(RestStatus.BAD_REQUEST)); + } + } + + public static Map buildExpectedChatCompletionResultMap(List results) { + return Map.of( + ChatCompletionResults.COMPLETION, + results.stream().map(result -> Map.of(ChatCompletionResults.Result.RESULT, result)).toList() + ); + } + + private OpenAiChatCompletionAction createAction( + String url, + String org, + String apiKey, + String modelName, + @Nullable String user, + Sender sender + ) { + var model = createChatCompletionModel(url, org, apiKey, modelName, user); + + return new OpenAiChatCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java new file mode 100644 index 0000000000000..5c3585b630073 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.Header; +import org.apache.http.HeaderElement; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.RequestTests; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiChatCompletionResponseHandlerTests extends ESTestCase { + + public void testHandle429InputAndOutputTokensTooLarge_ThrowWithoutRetrying() { + String responseBody = """ + { + "error": { + "message": "The input or output tokens must be reduced in order to run successfully", + "type": "content_too_large", + "param": null, + "code": null + } + } + """; + ByteArrayInputStream responseBodyStream = new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8)); + + var header = mock(Header.class); + when(header.getElements()).thenReturn(new HeaderElement[] {}); + + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(429); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getFirstHeader(anyString())).thenReturn(header); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var mockRequest = RequestTests.mockRequest("id"); + var httpResult = new HttpResult(httpResponse, responseBodyStream.readAllBytes()); + var handler = new OpenAiChatCompletionResponseHandler("", (request, result) -> null); + + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); + + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + is( + "Received a rate limit status code for request from inference entity id [id] status [429]. 
" + + "Error message: [The input or output tokens must be reduced in order to run successfully]" + ) + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java index 2d3ff25222ab9..0690bf56893ca 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -56,6 +56,22 @@ public void testXContent_InputTypeSearch_EmbeddingTypesInt8_TruncateNone() throw {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); } + public void testXContent_InputTypeSearch_EmbeddingTypesByte_TruncateNone() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE), + "model", + CohereEmbeddingType.BYTE + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); + } + public void testXContent_WritesNoOptionalFields_WhenTheyAreNotDefined() throws IOException { var entity = new CohereEmbeddingsRequestEntity(List.of("abc"), CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index d3783f6fed76b..32911eeb44adf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -21,7 +21,6 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import java.util.Map; @@ -30,7 +29,7 @@ import static org.hamcrest.Matchers.is; public class CohereEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_UrlDefined() throws URISyntaxException, IOException { + public void testCreateRequest_UrlDefined() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel("url", "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, null, null) @@ -50,10 +49,10 @@ public void testCreateRequest_UrlDefined() throws URISyntaxException, IOExceptio ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc")))); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "embedding_types", List.of("float")))); } - public void testCreateRequest_AllOptionsDefined() throws URISyntaxException, IOException { + public void testCreateRequest_AllOptionsDefined() throws IOException { var request = createRequest( List.of("abc"), 
CohereEmbeddingsModelTests.createModel( @@ -100,7 +99,7 @@ public void testCreateRequest_AllOptionsDefined() throws URISyntaxException, IOE ); } - public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() throws URISyntaxException, IOException { + public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel( @@ -147,7 +146,7 @@ public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() th ); } - public void testCreateRequest_TruncateNone() throws URISyntaxException, IOException { + public void testCreateRequest_TruncateNone() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel( @@ -175,11 +174,11 @@ public void testCreateRequest_TruncateNone() throws URISyntaxException, IOExcept ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none"))); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none", "embedding_types", List.of("float")))); } - public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) throws URISyntaxException { - var account = new CohereAccount(model.getServiceSettings().getCommonSettings().getUri(), model.getSecretSettings().apiKey()); + public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) { + var account = new CohereAccount(model.getServiceSettings().getCommonSettings().uri(), model.getSecretSettings().apiKey()); return new CohereEmbeddingsRequest(account, input, model); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..0b61bf060fc5f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class OpenAiChatCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesUserWhenDefined() throws IOException { + var entity = new OpenAiChatCompletionRequestEntity(List.of("abc"), "model", "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"abc"}],"model":"model","n":1,"user":"user"}""")); + + } + + public void testXContent_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new OpenAiChatCompletionRequestEntity(List.of("abc"), "model", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"abc"}],"model":"model","n":1}""")); + } + + public void testXContent_ThrowsIfModelIsNull() { + assertThrows(NullPointerException.class, () -> new OpenAiChatCompletionRequestEntity(List.of("abc"), null, "user")); + } + + public void testXContent_ThrowsIfMessagesAreNull() { + assertThrows(NullPointerException.class, () -> new OpenAiChatCompletionRequestEntity(null, "model", "user")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java new file mode 100644 index 0000000000000..7858bdf4d1259 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiChatCompletionRequest.buildDefaultUri; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithUrlOrganizationUserDefined() throws IOException { + var request = createRequest("www.google.com", "org", "secret", "abc", "model", "user"); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is("www.google.com")); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertThat(httpPost.getLastHeader(ORGANIZATION_HEADER).getValue(), is("org")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithDefaultUrl() throws URISyntaxException, IOException { + var request = createRequest(null, "org", "secret", "abc", "model", "user"); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is(buildDefaultUri().toString())); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertThat(httpPost.getLastHeader(ORGANIZATION_HEADER).getValue(), is("org")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithDefaultUrlAndWithoutUserOrganization() throws URISyntaxException, IOException { + var request = createRequest(null, null, "secret", "abc", "model", null); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), 
instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is(OpenAiChatCompletionRequest.buildDefaultUri().toString())); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertNull(httpPost.getLastHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(3)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testTruncate_DoesNotReduceInputTextSize() throws URISyntaxException, IOException { + var request = createRequest(null, null, "secret", "abcd", "model", null); + var truncatedRequest = request.truncate(); + assertThat(request.getURI().toString(), is(OpenAiChatCompletionRequest.buildDefaultUri().toString())); + + var httpRequest = truncatedRequest.createHttpRequest(); + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(3)); + + // We do not truncate for OpenAi chat completions + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abcd")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testTruncationInfo_ReturnsNull() { + var request = createRequest(null, null, "secret", "abcd", "model", null); + assertNull(request.getTruncationInfo()); + } + + public static OpenAiChatCompletionRequest createRequest( + @Nullable String url, + @Nullable String org, + String apiKey, + String input, + String model, + @Nullable String user + ) { + var chatCompletionModel = OpenAiChatCompletionModelTests.createChatCompletionModel(url, org, apiKey, model, user); + + var account = new OpenAiAccount( + chatCompletionModel.getServiceSettings().uri(), + org, + chatCompletionModel.getSecretSettings().apiKey() + ); + return new OpenAiChatCompletionRequest(account, List.of(input), chatCompletionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java new file mode 100644 index 0000000000000..4f7cd9ea89a14 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -0,0 +1,218 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentEOFException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; + +public class XContentUtilsTests extends ESTestCase { + + public void testMoveToFirstToken() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertNull(parser.currentToken()); + + XContentUtils.moveToFirstToken(parser); + + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + } + } + + public void testMoveToFirstToken_DoesNotMoveIfAlreadyAtAToken() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + // position at a valid token + parser.nextToken(); + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + + XContentUtils.moveToFirstToken(parser); + + // still at the beginning of the object + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + } + } + + public void testPositionParserAtTokenAfterField() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "key", "some error"); + + assertEquals("value", parser.text()); + } + } + + public void testPositionParserAtTokenAfterField_ThrowsIfFieldIsMissing() throws IOException { + var json = """ + { + "key": "value" + } + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + var exception = expectThrows( + IllegalStateException.class, + () -> XContentUtils.positionParserAtTokenAfterField(parser, missingField, errorFormat) + ); + + assertEquals(String.format(Locale.ROOT, errorFormat, missingField), exception.getMessage()); + } + } + + public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws IOException { + var json = """ + { + "key": "value", + "foo": "bar" + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + var exception = expectThrows( + XContentEOFException.class, + () -> XContentUtils.positionParserAtTokenAfterField(parser, missingField, errorFormat) + ); + + assertThat(exception.getMessage(), containsString("Unexpected end-of-input")); + } + } + + public void testConsumeUntilObjectEnd() throws IOException { + var json = """ + { + "key": "value", + "foo": true, + "bar": 0.1 + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + parser.nextToken(); + parser.nextToken(); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, 
parser.currentToken()); + assertNull(parser.nextToken()); // fully parsed + } + } + + public void testConsumeUntilObjectEnd_SkipArray() throws IOException { + var json = """ + { + "key": "value", + "skip_array": [1.0, 2.0, 3.0] + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + } + + public void testConsumeUntilObjectEnd_SkipNestedObject() throws IOException { + var json = """ + { + "key": "value", + "skip_obj": { + "foo": "bar" + } + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); // fully parsed + } + } + + public void testConsumeUntilObjectEnd_InArray() throws IOException { + var json = """ + [ + { + "key": "value", + "skip_obj": { + "foo": "bar" + } + }, + { + "key": "value", + "skip_array": [1.0, 2.0, 3.0] + }, + { + "key": "value", + "skip_field1": "f1", + "skip_field2": "f2" + } + ] + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + + // Parser now inside object 1 + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("key", parser.currentName()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + // Start of object 2 + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + // Start of object 3 + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("skip_field1", parser.currentName()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); + assertNull(parser.nextToken()); // fully parsed + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..18f702014e2d8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.openai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class OpenAiChatCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + ChatCompletionResults chatCompletionResults = OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "not_choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in OpenAI chat completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": { + "test": { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + }, + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "not_message": 
{ + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in OpenAI chat completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": "some content", + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 010e990a3ce80..4583ba9d21b6d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -327,4 +327,63 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") ); } + + public void testFieldsInDifferentOrderServer() throws IOException { + // The fields of the objects in the data array are reordered + String response = """ + { + "created": 1711530064, + "object": "list", + "id": "6667830b-716b-4796-9a61-33b67b5cc81d", + "model": "mxbai-embed-large-v1", + "data": [ + { + "embedding": [ + -0.9, + 0.5, + 0.3 + ], + "index": 0, + "object": "embedding" + }, + { + "index": 0, + "embedding": [ + 0.1, + 0.5 + ], + "object": "embedding" + }, + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.5, + 0.5 + ] + } + ], + "usage": { + "prompt_tokens": 0, + "completion_tokens": 0, + "total_tokens": 0 + } + }"""; + + TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), response.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(-0.9F, 0.5F, 0.3F)), + new TextEmbeddingResults.Embedding(List.of(0.1F, 0.5F)), + new TextEmbeddingResults.Embedding(List.of(0.5F, 0.5F)) + ) + ) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 2417148c84ac2..768f053295d13 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -76,7 +77,7 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIndexReceived() { var client = mockClient(); - var unknownIndexHit = SearchHit.createFromMap(Map.of("_index", "unknown_index")); + var unknownIndexHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", "unknown_index")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit })); var registry = new ModelRegistry(client); @@ -93,7 +94,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceEntry() { var client = mockClient(); - var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".secrets-inference")); + var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit })); var registry = new ModelRegistry(client); @@ -110,7 +111,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceSecretsEntry() { var client = mockClient(); - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); var registry = new ModelRegistry(client); @@ -140,9 +141,9 @@ public void testGetModelWithSecrets() { } """; - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); - var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".secrets-inference")); + var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); inferenceSecretsHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(secrets)))); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); @@ -171,7 +172,7 @@ public void testGetModelNoSecrets() { } """; - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java new file mode 100644 index 0000000000000..444f6792abe63 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class ChatCompletionResultsTests extends AbstractWireSerializingTestCase { + + public void testToXContent_CreateTheRightFormatForASingleChatCompletionResult() { + String resultContent = "content"; + var result = new ChatCompletionResults(List.of(new ChatCompletionResults.Result(resultContent))); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, resultContent)))) + ); + + String xContentResult = Strings.toString(result, true, true); + assertThat(xContentResult, is(""" + { + "completion" : [ + { + "result" : "content" + } + ] + }""")); + } + + public void testToXContent_CreatesTheRightFormatForMultipleCompletionResults() { + String resultOneContent = "content 1"; + String resultTwoContent = "content 2"; + + var entity = new ChatCompletionResults( + List.of(new ChatCompletionResults.Result(resultOneContent), new ChatCompletionResults.Result(resultTwoContent)) + ); + + assertThat( + entity.asMap(), + is( + Map.of( + ChatCompletionResults.COMPLETION, + List.of( + Map.of(ChatCompletionResults.Result.RESULT, resultOneContent), + Map.of(ChatCompletionResults.Result.RESULT, resultTwoContent) + ) + ) + ) + ); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "completion" : [ + { + "result" : "content 1" + }, + { + "result" : "content 2" + } + ] + }""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return ChatCompletionResults::new; + } + + @Override + protected ChatCompletionResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected ChatCompletionResults mutateInstance(ChatCompletionResults instance) throws IOException { + // if true we reduce the chat results list by a random amount, if false we add a chat result to the list + if (randomBoolean()) { + // -1 to remove at least one item from the list + int end = randomInt(instance.results().size() - 1); + return new ChatCompletionResults(instance.results().subList(0, end)); + } else { + List completionResults = new ArrayList<>(instance.results()); + completionResults.add(createRandomChatCompletionResult()); + return new ChatCompletionResults(completionResults); + } + } + + public static ChatCompletionResults createRandomResults() { + int numOfCompletionResults = randomIntBetween(1, 10); + List chatCompletionResults = new 
ArrayList<>(numOfCompletionResults); + + for (int i = 0; i < numOfCompletionResults; i++) { + chatCompletionResults.add(createRandomChatCompletionResult()); + } + + return new ChatCompletionResults(chatCompletionResults); + } + + private static ChatCompletionResults.Result createRandomChatCompletionResult() { + return new ChatCompletionResults.Result(randomAlphaOfLengthBetween(10, 300)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ErrorChunkedInferenceResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ErrorChunkedInferenceResultsTests.java new file mode 100644 index 0000000000000..4be00ea9e5822 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ErrorChunkedInferenceResultsTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; + +import java.io.IOException; + +public class ErrorChunkedInferenceResultsTests extends AbstractWireSerializingTestCase { + + public static ErrorChunkedInferenceResults createRandomResults() { + return new ErrorChunkedInferenceResults( + randomBoolean() + ? 
new ElasticsearchTimeoutException(randomAlphaOfLengthBetween(10, 50)) + : new ElasticsearchStatusException(randomAlphaOfLengthBetween(10, 50), randomFrom(RestStatus.values())) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return ErrorChunkedInferenceResults::new; + } + + @Override + protected ErrorChunkedInferenceResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected ErrorChunkedInferenceResults mutateInstance(ErrorChunkedInferenceResults instance) throws IOException { + return new ErrorChunkedInferenceResults(new RuntimeException(randomAlphaOfLengthBetween(10, 50))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index b5ea720490b5a..7805c6706dc61 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -151,7 +151,7 @@ public void testFromMap_PrefersModelId_OverModel() { public void testFromMap_MissingUrl_DoesNotThrowException() { var serviceSettings = CohereServiceSettings.fromMap(new HashMap<>(Map.of()), ConfigurationParseContext.PERSISTENT); - assertNull(serviceSettings.getUri()); + assertNull(serviceSettings.uri()); } public void testFromMap_EmptyUrl_ThrowsError() { @@ -196,7 +196,10 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { MatcherAssert.assertThat( thrownException.getMessage(), - is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];") + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. 
[similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index dae4c20d00d78..3a5527466c126 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -16,11 +16,13 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; @@ -98,8 +100,8 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), @@ -130,8 +132,8 @@ public void testParseRequestConfig_OptionalTaskSettings() throws IOException { MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), equalTo(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); @@ -256,7 +258,7 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() thr MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); 
MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); }, (e) -> fail("Model parsing should have succeeded " + e.getMessage())); @@ -294,8 +296,8 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -344,7 +346,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWit MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -353,7 +355,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWit public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.INT8), + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", DenseVectorFieldMapper.ElementType.BYTE), getTaskSettingsMap(InputType.SEARCH, CohereTruncation.NONE), getSecretSettingsMap("secret") ); @@ -369,9 +371,9 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.INT8)); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.BYTE)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE)) @@ -401,7 +403,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists 
MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -426,8 +428,8 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -450,7 +452,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -477,8 +479,8 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTa MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -496,8 +498,8 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOEx MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + 
MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); assertNull(embeddingsModel.getSecretSettings()); } @@ -534,8 +536,8 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutUrl() t MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); assertNull(embeddingsModel.getSecretSettings()); @@ -555,7 +557,7 @@ public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); assertNull(embeddingsModel.getSecretSettings()); } @@ -573,7 +575,7 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettin MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); assertNull(embeddingsModel.getSecretSettings()); } @@ -594,8 +596,8 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings( MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); assertNull(embeddingsModel.getSecretSettings()); } @@ -684,7 +686,10 @@ public void testInfer_SendsRequest() throws IOException { 
MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -751,6 +756,135 @@ public void testCheckModelConfig_UpdatesDimensions() throws IOException { } } + public void testCheckModelConfig_UpdatesSimilarityToDotProduct_WhenItIsNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 1, + null, + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result, + // the dimension is set to 2 because there are 2 embeddings returned from the mock server + is( + CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 2, + null, + null, + SimilarityMeasure.DOT_PRODUCT + ) + ) + ); + } + } + + public void testCheckModelConfig_DoesNotUpdateSimilarity_WhenItIsSpecifiedAsCosine() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 1, + null, + null, + SimilarityMeasure.COSINE + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result, + // the dimension is set to 2 because there are 2 embeddings returned from the mock server + is( + CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 2, + null, + null, + SimilarityMeasure.COSINE + ) + ) + ); + } + } + public void testInfer_UnauthorisedResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -838,7 +972,10 @@ public void 
testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsA MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -905,7 +1042,10 @@ public void testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIs MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -965,7 +1105,10 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("float"))) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java new file mode 100644 index 0000000000000..ed13e5a87e71b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class CohereEmbeddingTypeTests extends ESTestCase { + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingByte() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BYTE, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingInt8() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.INT8, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsFloat_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingFloat() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.FLOAT, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.FLOAT) + ); + } + + public void testTranslateToVersion_ReturnsByte_WhenVersionOnByteEnumAddition_WhenSpecifyingByte() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BYTE, TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED), + is(CohereEmbeddingType.BYTE) + ); + } + + public void testTranslateToVersion_ReturnsFloat_WhenVersionOnByteEnumAddition_WhenSpecifyingFloat() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.FLOAT, TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED), + is(CohereEmbeddingType.FLOAT) + ); + } + + public void testFromElementType_CovertsFloatToCohereEmbeddingTypeFloat() { + assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.FLOAT), is(CohereEmbeddingType.FLOAT)); + } + + public void testFromElementType_CovertsByteToCohereEmbeddingTypeByte() { + assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.BYTE), is(CohereEmbeddingType.BYTE)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java index ec36ac5ce58d5..b80414adce8c8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -18,6 +18,7 @@ import org.hamcrest.MatcherAssert; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.hamcrest.Matchers.is; @@ -218,7 +219,30 @@ public static CohereEmbeddingsModel createModel( "service", new CohereEmbeddingsServiceSettings( new CohereServiceSettings(url, SimilarityMeasure.DOT_PRODUCT, dimensions, tokenLimit, model), - embeddingType + Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) + ), + taskSettings, + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + CohereEmbeddingsTaskSettings taskSettings, + 
@Nullable Integer tokenLimit, + @Nullable Integer dimensions, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType, + @Nullable SimilarityMeasure similarityMeasure + ) { + return new CohereEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(url, similarityMeasure, dimensions, tokenLimit, model), + Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) ), taskSettings, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 41906cca15fe9..f6419c9405e4b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; @@ -37,7 +38,7 @@ public class CohereEmbeddingsServiceSettingsTests extends AbstractWireSerializingTestCase { public static CohereEmbeddingsServiceSettings createRandom() { var commonSettings = CohereServiceSettingsTests.createRandom(); - var embeddingType = randomBoolean() ? 
randomFrom(CohereEmbeddingType.values()) : null; + var embeddingType = randomFrom(CohereEmbeddingType.values()); return new CohereEmbeddingsServiceSettings(commonSettings, embeddingType); } @@ -62,7 +63,7 @@ public void testFromMap() { CohereServiceSettings.OLD_MODEL_ID_FIELD, model, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, - CohereEmbeddingType.INT8.toString() + DenseVectorFieldMapper.ElementType.BYTE.toString() ) ), ConfigurationParseContext.PERSISTENT @@ -73,7 +74,7 @@ public void testFromMap() { is( new CohereEmbeddingsServiceSettings( new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), - CohereEmbeddingType.INT8 + CohereEmbeddingType.BYTE ) ) ); @@ -102,7 +103,7 @@ public void testFromMap_WithModelId() { CohereEmbeddingType.INT8.toString() ) ), - ConfigurationParseContext.PERSISTENT + ConfigurationParseContext.REQUEST ); MatcherAssert.assertThat( @@ -138,7 +139,7 @@ public void testFromMap_PrefersModelId_OverModel() { CohereServiceSettings.MODEL_ID, model, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, - CohereEmbeddingType.INT8.toString() + CohereEmbeddingType.BYTE.toString() ) ), ConfigurationParseContext.PERSISTENT @@ -149,15 +150,15 @@ public void testFromMap_PrefersModelId_OverModel() { is( new CohereEmbeddingsServiceSettings( new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), - CohereEmbeddingType.INT8 + CohereEmbeddingType.BYTE ) ) ); } - public void testFromMap_MissingEmbeddingType_DoesNotThrowException() { + public void testFromMap_MissingEmbeddingType_DefaultsToFloat() { var serviceSettings = CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of()), ConfigurationParseContext.PERSISTENT); - assertNull(serviceSettings.getEmbeddingType()); + assertThat(serviceSettings.getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); } public void testFromMap_EmptyEmbeddingType_ThrowsError() { @@ -180,12 +181,12 @@ public void testFromMap_EmptyEmbeddingType_ThrowsError() { ); } - public void testFromMap_InvalidEmbeddingType_ThrowsError() { + public void testFromMap_InvalidEmbeddingType_ThrowsError_ForRequest() { var thrownException = expectThrows( ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc")), - ConfigurationParseContext.PERSISTENT + ConfigurationParseContext.REQUEST ) ); @@ -193,17 +194,18 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError() { thrownException.getMessage(), is( Strings.format( - "Validation Failed: 1: [service_settings] Invalid value [abc] received. [embedding_type] must be one of [float, int8];" + "Validation Failed: 1: [service_settings] Invalid value [abc] received. 
" + + "[embedding_type] must be one of [byte, float, int8];" ) ) ); } - public void testFromMap_InvalidEmbeddingType_ThrowsError_WhenByteFromPersistedConfig() { + public void testFromMap_InvalidEmbeddingType_ThrowsError_ForPersistent() { var thrownException = expectThrows( ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE_BYTE)), + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc")), ConfigurationParseContext.PERSISTENT ) ); @@ -212,7 +214,8 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_WhenByteFromPersistedCo thrownException.getMessage(), is( Strings.format( - "Validation Failed: 1: [service_settings] Invalid value [byte] received. [embedding_type] must be one of [float, int8];" + "Validation Failed: 1: [service_settings] Invalid value [abc] received. " + + "[embedding_type] must be one of [byte, float];" ) ) ); @@ -233,10 +236,35 @@ public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { ); } - public void testFromMap_ConvertsCohereEmbeddingType_FromByteToInt8() { + public void testFromMap_ConvertsElementTypeByte_ToCohereEmbeddingTypeByte() { + assertThat( + CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, DenseVectorFieldMapper.ElementType.BYTE.toString())), + ConfigurationParseContext.PERSISTENT + ), + is(new CohereEmbeddingsServiceSettings(new CohereServiceSettings((URI) null, null, null, null, null), CohereEmbeddingType.BYTE)) + ); + } + + public void testFromMap_ConvertsElementTypeFloat_ToCohereEmbeddingTypeFloat() { + assertThat( + CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, DenseVectorFieldMapper.ElementType.FLOAT.toString())), + ConfigurationParseContext.PERSISTENT + ), + is( + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings((URI) null, null, null, null, null), + CohereEmbeddingType.FLOAT + ) + ) + ); + } + + public void testFromMap_ConvertsInt8_ToCohereEmbeddingTypeInt8() { assertThat( CohereEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE_BYTE)), + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingType.INT8.toString())), ConfigurationParseContext.REQUEST ), is(new CohereEmbeddingsServiceSettings(new CohereServiceSettings((URI) null, null, null, null, null), CohereEmbeddingType.INT8)) @@ -281,11 +309,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry(entries); } - public static Map getServiceSettingsMap( - @Nullable String url, - @Nullable String model, - @Nullable CohereEmbeddingType embeddingType - ) { + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model, @Nullable Enum embeddingType) { var map = new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); if (embeddingType != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 4f0deaceb17da..80ceb855f9e94 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -19,13 +19,18 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.util.ArrayList; @@ -36,7 +41,10 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; @@ -229,16 +237,17 @@ public void testParsePersistedConfig() { settings.put( ModelConfigurations.SERVICE_SETTINGS, new HashMap<>( - Map.of(ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, 1, ElasticsearchInternalServiceSettings.NUM_THREADS, 4) + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + ServiceFields.SIMILARITY, + SimilarityMeasure.L2_NORM.toString() + ) ) ); - var e5ServiceSettings = new MultilingualE5SmallInternalServiceSettings( - 1, - 4, - ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID - ); - expectThrows(IllegalArgumentException.class, () -> service.parsePersistedConfig(randomInferenceEntityId, taskType, settings)); } @@ -284,7 +293,9 @@ public void testParsePersistedConfig() { ElasticsearchInternalServiceSettings.NUM_THREADS, 4, InternalServiceSettings.MODEL_ID, - ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID, + ServiceFields.DIMENSIONS, + 1 ) ) ); @@ -346,6 +357,7 @@ public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); + mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults); ThreadPool threadpool = new TestThreadPool("test"); @@ -372,7 +384,7 @@ public void testChunkInfer() { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { - assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse, 
hasSize(3)); assertThat(chunkedResponse.get(0), instanceOf(ChunkedTextEmbeddingResults.class)); var result1 = (ChunkedTextEmbeddingResults) chunkedResponse.get(0); assertEquals( @@ -385,6 +397,9 @@ public void testChunkInfer() { ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunks() ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); + assertThat(result3.getException(), instanceOf(RuntimeException.class)); + assertThat(result3.getException().getMessage(), containsString("boom")); gotResults.set(true); }, ESTestCase::fail); @@ -403,6 +418,63 @@ public void testChunkInfer() { assertTrue("Listener not called", gotResults.get()); } + @SuppressWarnings("unchecked") + public void testChunkInferSetsTokenization() { + var expectedSpan = new AtomicInteger(); + var expectedWindowSize = new AtomicReference(); + + Client client = mock(Client.class); + ThreadPool threadpool = new TestThreadPool("test"); + try { + when(client.threadPool()).thenReturn(threadpool); + doAnswer(invocationOnMock -> { + var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; + assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); + var update = (TokenizationConfigUpdate) request.getUpdate(); + assertEquals(update.getSpanSettings().span(), expectedSpan.get()); + assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); + return null; + }).when(client) + .execute( + same(InferTrainedModelDeploymentAction.INSTANCE), + any(InferTrainedModelDeploymentAction.Request.class), + any(ActionListener.class) + ); + + var model = new MultilingualE5SmallModel( + "foo", + TaskType.TEXT_EMBEDDING, + "e5", + new MultilingualE5SmallInternalServiceSettings(1, 1, "cross-platform") + ); + var service = createService(client); + + expectedSpan.set(-1); + expectedWindowSize.set(null); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + null, + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + + expectedSpan.set(-1); + expectedWindowSize.set(256); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(256, null), + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + } finally { + terminate(threadpool); + } + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElasticsearchInternalService(context); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index 6da634afddeb0..dbb50260edaf1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -23,8 +23,11 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import 
org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import java.util.ArrayList; import java.util.Collections; @@ -33,6 +36,8 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -330,6 +335,7 @@ public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); + mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults); ThreadPool threadpool = new TestThreadPool("test"); @@ -357,7 +363,7 @@ public void testChunkInfer() { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { - assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse, hasSize(3)); assertThat(chunkedResponse.get(0), instanceOf(ChunkedSparseEmbeddingResults.class)); var result1 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(0); assertEquals( @@ -370,6 +376,9 @@ public void testChunkInfer() { ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunkedResults() ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); + assertThat(result3.getException(), instanceOf(RuntimeException.class)); + assertThat(result3.getException().getMessage(), containsString("boom")); gotResults.set(true); }, ESTestCase::fail); @@ -388,6 +397,64 @@ public void testChunkInfer() { assertTrue("Listener not called", gotResults.get()); } + @SuppressWarnings("unchecked") + public void testChunkInferSetsTokenization() { + var expectedSpan = new AtomicInteger(); + var expectedWindowSize = new AtomicReference(); + + ThreadPool threadpool = new TestThreadPool("test"); + Client client = mock(Client.class); + try { + when(client.threadPool()).thenReturn(threadpool); + doAnswer(invocationOnMock -> { + var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; + assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); + var update = (TokenizationConfigUpdate) request.getUpdate(); + assertEquals(update.getSpanSettings().span(), expectedSpan.get()); + assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); + return null; + }).when(client) + .execute( + same(InferTrainedModelDeploymentAction.INSTANCE), + any(InferTrainedModelDeploymentAction.Request.class), + any(ActionListener.class) + ); + + var model = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + "elser", + new ElserInternalServiceSettings(1, 1, "elser"), + new ElserMlNodeTaskSettings() + ); + var service = createService(client); + + expectedSpan.set(-1); + expectedWindowSize.set(null); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + null, + ActionListener.wrap(r -> fail("unexpected result"), e -> 
fail(e.getMessage())) + ); + + expectedSpan.set(-1); + expectedWindowSize.set(256); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(256, null), + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + } finally { + terminate(threadpool); + } + } + private ElserInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElserInternalService(context); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index f32fafd493395..6fd56c30516be 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -117,7 +117,13 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity))) ); - assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + assertThat( + thrownException.getMessage(), + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. [similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) + ); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index c4c49065cd79c..be18963cc72c1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; @@ -512,6 +513,59 @@ public void testCheckModelConfig_IncludesMaxTokens() throws IOException { } } + + public void testCheckModelConfig_UsesUserSpecifiedSimilarity() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123 + ] + ] + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 2, SimilarityMeasure.COSINE); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat( + result, + is(HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 1, SimilarityMeasure.COSINE)) + ); + } + }
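+ + // Mirrors the test above but with no similarity measure configured: the mocked response again contains a + // single one-element embedding, so checkModelConfig is expected to correct the dimensions to 1 while + // leaving the similarity measure null rather than defaulting it.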
+ + public void testCheckModelConfig_LeavesSimilarityAsNull_WhenUnspecified() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123 + ] + ] + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 2, null); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result, is(HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 1, null))); + } + } + private HuggingFaceService createHuggingFaceService() { return new HuggingFaceService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java index cb37ccfead45d..7fcfd5ee46a90 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.inference.services.huggingface.embeddings; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; @@ -52,4 +54,20 @@ public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static HuggingFaceEmbeddingsModel createModel( + String url, + String apiKey, + int tokenLimit, + int dimensions, + @Nullable SimilarityMeasure similarityMeasure + ) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(createUri(url), similarityMeasure, dimensions, tokenLimit), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index d819b2b243872..6cfcf974d3250 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import
org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests; import org.hamcrest.MatcherAssert; @@ -119,6 +120,41 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc } } + public void testParseRequestConfig_CreatesAnOpenAiChatCompletionsModel() throws IOException { + var url = "url"; + var organization = "org"; + var model = "model"; + var user = "user"; + var secret = "secret"; + + try (var service = createOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(m -> { + assertThat(m, instanceOf(OpenAiChatCompletionModel.class)); + + var completionsModel = (OpenAiChatCompletionModel) m; + + assertThat(completionsModel.getServiceSettings().uri().toString(), is(url)); + assertThat(completionsModel.getServiceSettings().organizationId(), is(organization)); + assertThat(completionsModel.getServiceSettings().modelId(), is(model)); + assertThat(completionsModel.getTaskSettings().user(), is(user)); + assertThat(completionsModel.getSecretSettings().apiKey().toString(), is(secret)); + + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.COMPLETION, + getRequestConfigMap( + getServiceSettingsMap(model, url, organization), + getTaskSettingsMap(user), + getSecretSettingsMap(secret) + ), + Set.of(), + modelVerificationListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap( @@ -244,6 +280,33 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO } } + public void testParseRequestConfig_CreatesAnOpenAiChatCompletionsModelWithoutUserWithoutUserUrlOrganization() throws IOException { + var model = "model"; + var secret = "secret"; + + try (var service = createOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(m -> { + assertThat(m, instanceOf(OpenAiChatCompletionModel.class)); + + var completionsModel = (OpenAiChatCompletionModel) m; + assertNull(completionsModel.getServiceSettings().uri()); + assertNull(completionsModel.getServiceSettings().organizationId()); + assertThat(completionsModel.getServiceSettings().modelId(), is(model)); + assertNull(completionsModel.getTaskSettings().user()); + assertThat(completionsModel.getSecretSettings().apiKey().toString(), is(secret)); + + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.COMPLETION, + getRequestConfigMap(getServiceSettingsMap(model, null, null), getTaskSettingsMap(null), getSecretSettingsMap(secret)), + Set.of(), + modelVerificationListener + ); + } + } + public void testParseRequestConfig_MovesModel() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { @@ -889,6 +952,118 @@ public void testCheckModelConfig_ReturnsModelWithSameDimensions_AndDocProductSet public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensionsField_WhenNotSetByUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", 
+ "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", null, 100, 100, false); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var returnedModel = listener.actionGet(TIMEOUT); + assertThat( + returnedModel, + is( + OpenAiEmbeddingsModelTests.createModel( + getUrl(webServer), + "org", + "secret", + "model", + "user", + SimilarityMeasure.DOT_PRODUCT, + 100, + 2, + false + ) + ) + ); + + assertThat(webServer.requests(), hasSize(1)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, Matchers.is(Map.of("input", List.of("how big"), "model", "model", "user", "user"))); + } + } + + public void testCheckModelConfig_ReturnsNewModelReference_SetsSimilarityToDocProduct_WhenNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", null, 100, 100, false); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var returnedModel = listener.actionGet(TIMEOUT); + assertThat( + returnedModel, + is( + OpenAiEmbeddingsModelTests.createModel( + getUrl(webServer), + "org", + "secret", + "model", + "user", + SimilarityMeasure.DOT_PRODUCT, + 100, + 2, + false + ) + ) + ); + + assertThat(webServer.requests(), hasSize(1)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, Matchers.is(Map.of("input", List.of("how big"), "model", "model", "user", "user"))); + } + } + + public void testCheckModelConfig_ReturnsNewModelReference_DoesNotOverrideSimilarity_WhenNotNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { String responseJson = """ @@ -937,7 +1112,7 @@ public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensio "secret", "model", "user", - SimilarityMeasure.DOT_PRODUCT, + SimilarityMeasure.COSINE, 100, 2, false diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java new file mode 100644 index 0000000000000..efc1fcc921ef3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class OpenAiChatCompletionModelTests extends ESTestCase { + + public void testOverrideWith_OverridesUser() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + var requestTaskSettingsMap = getChatCompletionRequestTaskSettingsMap("user_override"); + + var overriddenModel = OpenAiChatCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, is(createChatCompletionModel("url", "org", "api_key", "model_name", "user_override"))); + } + + public void testOverrideWith_EmptyMap() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + + var requestTaskSettingsMap = Map.of(); + + var overriddenModel = OpenAiChatCompletionModel.of(model, requestTaskSettingsMap); + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + + var overriddenModel = OpenAiChatCompletionModel.of(model, null); + assertThat(overriddenModel, sameInstance(model)); + } + + public static OpenAiChatCompletionModel createChatCompletionModel( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String user + ) { + return new OpenAiChatCompletionModel( + "id", + TaskType.COMPLETION, + "service", + new OpenAiChatCompletionServiceSettings(modelName, url, org, null), + new OpenAiChatCompletionTaskSettings(user), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..6fbdd3bf622d3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(settings.user()); + } + + public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "value"))); + assertNull(settings.user()); + } + + public void testFromMap_ReturnsUser() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public static Map getChatCompletionRequestTaskSettingsMap(@Nullable String user) { + var map = new HashMap(); + + if (user != null) { + map.put(OpenAiServiceFields.USER, user); + } + + return map; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..ba2460f7bc09a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; + +import java.io.IOException; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var modelId = "some model"; + var url = "https://www.elastic.co"; + var org = "organization"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + ServiceFields.URL, + url, + OpenAiServiceFields.ORGANIZATION, + org, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + + assertThat(serviceSettings, is(new OpenAiChatCompletionServiceSettings(modelId, ServiceUtils.createUri(url), org, maxInputTokens))); + } + + public void testFromMap_MissingUrl_DoesNotThrowException() { + var modelId = "some model"; + var organization = "org"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + OpenAiServiceFields.ORGANIZATION, + organization, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + + assertNull(serviceSettings.uri()); + assertThat(serviceSettings.modelId(), is(modelId)); + assertThat(serviceSettings.organizationId(), is(organization)); + assertThat(serviceSettings.maxInputTokens(), is(maxInputTokens)); + } + + public void testFromMap_EmptyUrl_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, "", ServiceFields.MODEL_ID, "model"))) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", + ServiceFields.URL + ) + ) + ); + } + + public void testFromMap_MissingOrganization_DoesNotThrowException() { + var modelId = "some model"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens)) + ); + + assertNull(serviceSettings.uri()); + assertThat(serviceSettings.modelId(), is(modelId)); + assertThat(serviceSettings.maxInputTokens(), is(maxInputTokens)); + } + + public void testFromMap_EmptyOrganization_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")) + ) + ); + + assertThat( + thrownException.getMessage(), + containsString( + org.elasticsearch.common.Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", + OpenAiServiceFields.ORGANIZATION + ) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, "model")) + ) + ); + + assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) + ); + } + + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model","url":"url","organization_id":"org","max_input_tokens":1024}""")); + } + + public void testToXContent_DoesNotWriteOptionalValues() throws IOException { + URI uri = null; + + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", uri, null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenAiChatCompletionServiceSettings::new; + } + + @Override + protected OpenAiChatCompletionServiceSettings createTestInstance() { + return createRandomWithNonNullUrl(); + } + + @Override + protected OpenAiChatCompletionServiceSettings mutateInstance(OpenAiChatCompletionServiceSettings instance) throws IOException { + return createRandomWithNonNullUrl(); + } + + private static OpenAiChatCompletionServiceSettings createRandomWithNonNullUrl() { + return createRandom(randomAlphaOfLength(15)); + } + + private static OpenAiChatCompletionServiceSettings createRandom(String url) { + var modelId = randomAlphaOfLength(8); + var organizationId = randomFrom(randomAlphaOfLength(15), null); + var maxInputTokens = randomFrom(randomIntBetween(128, 4096), null); + + return new OpenAiChatCompletionServiceSettings(modelId, ServiceUtils.createUri(url), organizationId, maxInputTokens); + } + +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..f2bd26a4e6432 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static OpenAiChatCompletionTaskSettings createRandomWithUser() { + return new OpenAiChatCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public void testFromMap_WithUser() { + assertEquals( + new OpenAiChatCompletionTaskSettings("user"), + OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) + ); + + assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. 
[user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of( + taskSettings, + OpenAiChatCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); + + var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user2"))); + + var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new OpenAiChatCompletionTaskSettings("user2"))); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenAiChatCompletionTaskSettings::new; + } + + @Override + protected OpenAiChatCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected OpenAiChatCompletionTaskSettings mutateInstance(OpenAiChatCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index 5a39fcb61ff0a..c95853e2d0128 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.util.HashMap; import java.util.Map; @@ -27,7 +28,7 @@ public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() } public void testFromMap_ReturnsUser() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user"))); + var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } @@ -35,7 +36,7 @@ public static Map getRequestTaskSettingsMap(@Nullable String use var map = new HashMap(); if (user != null) { - map.put(OpenAiEmbeddingsTaskSettings.USER, user); + map.put(OpenAiServiceFields.USER, user); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 00cea6dc6ed21..0ada6d96195e6 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.hamcrest.CoreMatchers; import java.io.IOException; @@ -79,7 +80,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, ServiceFields.URL, url, - OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -121,7 +122,7 @@ public void testFromMap_Request_DimensionsSetByUser_IsFalse_WhenDimensionsAreNot modelId, ServiceFields.URL, url, - OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -162,7 +163,7 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { modelId, ServiceFields.URL, url, - OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -219,7 +220,7 @@ public void testFromMap_PersistentContext_ThrowsException_WhenDimensionsSetByUse public void testFromMap_MissingUrl_DoesNotThrowException() { var serviceSettings = OpenAiEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, "m", OpenAiEmbeddingsServiceSettings.ORGANIZATION, "org")), + new HashMap<>(Map.of(ServiceFields.MODEL_ID, "m", OpenAiServiceFields.ORGANIZATION, "org")), ConfigurationParseContext.REQUEST ); assertNull(serviceSettings.uri()); @@ -260,7 +261,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsServiceSettings.ORGANIZATION, "", ServiceFields.MODEL_ID, "m")), + new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "m")), ConfigurationParseContext.REQUEST ) ); @@ -270,7 +271,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", - OpenAiEmbeddingsServiceSettings.ORGANIZATION + OpenAiServiceFields.ORGANIZATION ) ) ); @@ -302,7 +303,13 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { ) ); - assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + assertThat( + thrownException.getMessage(), + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. 
[similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) + ); } public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { @@ -375,7 +382,7 @@ public static Map getServiceSettingsMap(String modelId, @Nullabl } if (org != null) { - map.put(OpenAiEmbeddingsServiceSettings.ORGANIZATION, org); + map.put(OpenAiServiceFields.ORGANIZATION, org); } return map; } @@ -395,7 +402,7 @@ public static Map getServiceSettingsMap( } if (org != null) { - map.put(OpenAiEmbeddingsServiceSettings.ORGANIZATION, org); + map.put(OpenAiServiceFields.ORGANIZATION, org); } if (dimensions != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index 6448b66d11cf3..c5a510ef9de0c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -38,10 +39,7 @@ public static OpenAiEmbeddingsTaskSettings createRandom() { public void testFromMap_WithUser() { assertEquals( new OpenAiEmbeddingsTaskSettings("user"), - OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), - ConfigurationParseContext.REQUEST - ) + OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.REQUEST) ); } @@ -49,7 +47,7 @@ public void testFromMap_UserIsEmptyString() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "")), ConfigurationParseContext.REQUEST ) ); @@ -67,7 +65,7 @@ public void testFromMap_MissingUser_DoesNotThrowException() { public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.PERSISTENT ); @@ -77,13 +75,11 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { public void testOverrideWith_UsesOverriddenSettings() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.PERSISTENT ); - var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user2")) - ); + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user2"))); var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new 
OpenAiEmbeddingsTaskSettings("user2"))); @@ -108,7 +104,7 @@ public static Map getTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { - map.put(OpenAiEmbeddingsTaskSettings.USER, user); + map.put(OpenAiServiceFields.USER, user); } return map; diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 52424956ef53e..e0ce1f92b2a37 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -76,7 +76,7 @@ private static UnsignedLongFieldMapper toType(FieldMapper in) { return (UnsignedLongFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); @@ -195,7 +195,7 @@ Number parsedNullValue() { @Override public UnsignedLongFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } UnsignedLongFieldType fieldType = new UnsignedLongFieldType( @@ -539,9 +539,7 @@ static Long parseUpperRangeTerm(Object value, boolean include) { return longValue; } - /** - * @return true if field has been marked as a dimension field - */ + @Override public boolean isDimension() { return isDimension; } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 7a96bec42eb7b..055561c747a63 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -15,6 +15,8 @@ dependencies { javaRestTestImplementation project(path: xpackModule('monitoring')) javaRestTestImplementation project(path: xpackModule('transform')) javaRestTestImplementation project(path: xpackModule('rank-rrf')) + javaRestTestImplementation project(path: xpackModule('ql')) + javaRestTestImplementation project(path: xpackModule('esql')) } // location for keys and certificates diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index f9213a7fcaeb8..c4640bc17845a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -90,9 +90,11 @@ import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.TokenMetadata; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import 
org.elasticsearch.xpack.ml.autoscaling.MlScalingReason; +import org.elasticsearch.xpack.ql.plugin.QlPlugin; import org.elasticsearch.xpack.slm.SnapshotLifecycle; import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; import org.elasticsearch.xpack.transform.Transform; @@ -154,7 +156,10 @@ protected Collection> nodePlugins() { SnapshotLifecycle.class, // The feature reset API touches transform custom cluster state so we need this plugin to understand it Transform.class, - DataStreamsPlugin.class + DataStreamsPlugin.class, + // ESQL and its dependency needed for node features + QlPlugin.class, + EsqlPlugin.class ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 76321608ba4fb..dc04d65103f5f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestParameters; @@ -17,7 +18,8 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; @@ -26,8 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -37,6 +37,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; @@ -53,6 +54,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelSizeStats; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -65,6 +67,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Function; import 
java.util.stream.Collectors; import java.util.stream.Stream; @@ -73,7 +76,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.ml.utils.InferenceProcessorInfoExtractor.pipelineIdsByResource; -public class TransportGetTrainedModelsStatsAction extends HandledTransportAction< +public class TransportGetTrainedModelsStatsAction extends TransportAction< GetTrainedModelsStatsAction.Request, GetTrainedModelsStatsAction.Response> { @@ -82,25 +85,22 @@ public class TransportGetTrainedModelsStatsAction extends HandledTransportAction private final Client client; private final ClusterService clusterService; private final TrainedModelProvider trainedModelProvider; + private final Executor executor; @Inject public TransportGetTrainedModelsStatsAction( TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, + ThreadPool threadPool, TrainedModelProvider trainedModelProvider, Client client ) { - super( - GetTrainedModelsStatsAction.NAME, - transportService, - actionFilters, - GetTrainedModelsStatsAction.Request::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(GetTrainedModelsStatsAction.NAME, actionFilters, transportService.getTaskManager()); this.client = client; this.clusterService = clusterService; this.trainedModelProvider = trainedModelProvider; + this.executor = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); } @Override @@ -108,6 +108,15 @@ protected void doExecute( Task task, GetTrainedModelsStatsAction.Request request, ActionListener listener + ) { + // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can + executor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); + } + + protected void doExecuteForked( + Task task, + GetTrainedModelsStatsAction.Request request, + ActionListener listener ) { final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); final ModelAliasMetadata modelAliasMetadata = ModelAliasMetadata.fromState(clusterService.state()); @@ -116,101 +125,108 @@ protected void doExecute( GetTrainedModelsStatsAction.Response.Builder responseBuilder = new GetTrainedModelsStatsAction.Response.Builder(); - ListenableFuture> modelSizeStatsListener = new ListenableFuture<>(); - modelSizeStatsListener.addListener(listener.delegateFailureAndWrap((l, modelSizeStatsByModelId) -> { - responseBuilder.setModelSizeStatsByModelId(modelSizeStatsByModelId); - l.onResponse( - responseBuilder.build(modelToDeployments(responseBuilder.getExpandedModelIdsWithAliases().keySet(), assignmentMetadata)) - ); - })); - - ListenableFuture deploymentStatsListener = new ListenableFuture<>(); - deploymentStatsListener.addListener(listener.delegateFailureAndWrap((delegate, deploymentStats) -> { - // deployment stats for each matching deployment - // not necessarily for all models - responseBuilder.setDeploymentStatsByDeploymentId( - deploymentStats.getStats() - .results() + SubscribableListener + + .>>>newForked(l -> { + // When the request resource is a deployment find the model used in that deployment for the model stats + final var idExpression = addModelsUsedInMatchingDeployments(request.getResourceId(), assignmentMetadata); + + logger.debug("Expanded models/deployment Ids request [{}]", idExpression); + + // the request id may contain deployment ids + // It is not an error if these don't match a model id but + // they need to be included in case the deployment id is also + // a 
model id. Hence, the `matchedDeploymentIds` parameter + trainedModelProvider.expandIds( + idExpression, + request.isAllowNoResources(), + request.getPageParams(), + Collections.emptySet(), + modelAliasMetadata, + parentTaskId, + matchedDeploymentIds, + l + ); + }) + .andThenAccept(tuple -> responseBuilder.setExpandedModelIdsWithAliases(tuple.v2()).setTotalModelCount(tuple.v1())) + + .andThen( + (l, ignored) -> executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportNodesStatsAction.TYPE, + nodeStatsRequest(clusterService.state(), parentTaskId), + l + ) + ) + .>andThen(executor, null, (l, nodesStatsResponse) -> { + // find all pipelines whether using the model id, alias or deployment id. + Set allPossiblePipelineReferences = responseBuilder.getExpandedModelIdsWithAliases() + .entrySet() .stream() - .collect(Collectors.toMap(AssignmentStats::getDeploymentId, Function.identity())) - ); + .flatMap(entry -> Stream.concat(entry.getValue().stream(), Stream.of(entry.getKey()))) + .collect(Collectors.toSet()); + allPossiblePipelineReferences.addAll(matchedDeploymentIds); - int numberOfAllocations = deploymentStats.getStats().results().stream().mapToInt(AssignmentStats::getNumberOfAllocations).sum(); - modelSizeStats( - responseBuilder.getExpandedModelIdsWithAliases(), - request.isAllowNoResources(), - parentTaskId, - modelSizeStatsListener, - numberOfAllocations - ); - })); - - ListenableFuture> inferenceStatsListener = new ListenableFuture<>(); - // inference stats are per model and are only - // persisted for boosted tree models - inferenceStatsListener.addListener(listener.delegateFailureAndWrap((l, inferenceStats) -> { - responseBuilder.setInferenceStatsByModelId( - inferenceStats.stream().collect(Collectors.toMap(InferenceStats::getModelId, Function.identity())) - ); - getDeploymentStats(client, request.getResourceId(), parentTaskId, assignmentMetadata, deploymentStatsListener); - })); - - ListenableFuture nodesStatsListener = new ListenableFuture<>(); - nodesStatsListener.addListener(listener.delegateFailureAndWrap((delegate, nodesStatsResponse) -> { - // find all pipelines whether using the model id, - // alias or deployment id. 
- Set allPossiblePipelineReferences = responseBuilder.getExpandedModelIdsWithAliases() - .entrySet() - .stream() - .flatMap(entry -> Stream.concat(entry.getValue().stream(), Stream.of(entry.getKey()))) - .collect(Collectors.toSet()); - allPossiblePipelineReferences.addAll(matchedDeploymentIds); - - Map> pipelineIdsByResource = pipelineIdsByResource(clusterService.state(), allPossiblePipelineReferences); - Map modelIdIngestStats = inferenceIngestStatsByModelId( - nodesStatsResponse, - modelAliasMetadata, - pipelineIdsByResource - ); - responseBuilder.setIngestStatsByModelId(modelIdIngestStats); - trainedModelProvider.getInferenceStats( - responseBuilder.getExpandedModelIdsWithAliases().keySet().toArray(new String[0]), - parentTaskId, - inferenceStatsListener - ); - })); - - ListenableFuture>>> idsListener = new ListenableFuture<>(); - idsListener.addListener(listener.delegateFailureAndWrap((delegate, tuple) -> { - responseBuilder.setExpandedModelIdsWithAliases(tuple.v2()).setTotalModelCount(tuple.v1()); - executeAsyncWithOrigin( - client, - ML_ORIGIN, - TransportNodesStatsAction.TYPE, - nodeStatsRequest(clusterService.state(), parentTaskId), - nodesStatsListener - ); - })); - - // When the request resource is a deployment find the - // model used in that deployment for the model stats - String idExpression = addModelsUsedInMatchingDeployments(request.getResourceId(), assignmentMetadata); - logger.debug("Expanded models/deployment Ids request [{}]", idExpression); - - // the request id may contain deployment ids - // It is not an error if these don't match a model id but - // they need to be included in case the deployment id is also - // a model id. Hence, the `matchedDeploymentIds` parameter - trainedModelProvider.expandIds( - idExpression, - request.isAllowNoResources(), - request.getPageParams(), - Collections.emptySet(), - modelAliasMetadata, - parentTaskId, - matchedDeploymentIds, - idsListener - ); + Map> pipelineIdsByResource = pipelineIdsByResource( + clusterService.state(), + allPossiblePipelineReferences + ); + Map modelIdIngestStats = inferenceIngestStatsByModelId( + nodesStatsResponse, + modelAliasMetadata, + pipelineIdsByResource + ); + responseBuilder.setIngestStatsByModelId(modelIdIngestStats); + trainedModelProvider.getInferenceStats( + responseBuilder.getExpandedModelIdsWithAliases().keySet().toArray(new String[0]), + parentTaskId, + l + ); + }) + .andThenAccept( + // inference stats are per model and are only persisted for boosted tree models + inferenceStats -> responseBuilder.setInferenceStatsByModelId( + inferenceStats.stream().collect(Collectors.toMap(InferenceStats::getModelId, Function.identity())) + ) + ) + + .andThen( + executor, + null, + (l, ignored) -> getDeploymentStats(client, request.getResourceId(), parentTaskId, assignmentMetadata, l) + ) + .andThenApply(deploymentStats -> { + // deployment stats for each matching deployment not necessarily for all models + responseBuilder.setDeploymentStatsByDeploymentId( + deploymentStats.getStats() + .results() + .stream() + .collect(Collectors.toMap(AssignmentStats::getDeploymentId, Function.identity())) + ); + return deploymentStats.getStats().results().stream().mapToInt(AssignmentStats::getNumberOfAllocations).sum(); + }) + + .>andThen( + executor, + null, + (l, numberOfAllocations) -> modelSizeStats( + responseBuilder.getExpandedModelIdsWithAliases(), + request.isAllowNoResources(), + parentTaskId, + l, + numberOfAllocations + ) + ) + .andThenAccept(responseBuilder::setModelSizeStatsByModelId) + + .andThenApply( + 
ignored -> responseBuilder.build( + modelToDeployments(responseBuilder.getExpandedModelIdsWithAliases().keySet(), assignmentMetadata) + ) + ) + + .addListener(listener, executor, null); } static String addModelsUsedInMatchingDeployments(String idExpression, TrainedModelAssignmentMetadata assignmentMetadata) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java index 650c02af00837..faef29ff65070 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java @@ -93,13 +93,13 @@ public ChangePointAggregator(String name, String bucketsPath, Map maybeBucketsValue = extractDoubleBucketedValues( + Optional maybeBucketValues = extractDoubleBucketedValues( bucketsPaths()[0], aggregations, BucketHelpers.GapPolicy.SKIP, true ); - if (maybeBucketsValue.isEmpty()) { + if (maybeBucketValues.isEmpty()) { return new InternalChangePointAggregation( name(), metadata(), @@ -107,7 +107,7 @@ public InternalAggregation doReduce(InternalAggregations aggregations, Aggregati new ChangeType.Indeterminable("unable to find valid bucket values in bucket path [" + bucketsPaths()[0] + "]") ); } - MlAggsHelper.DoubleBucketValues bucketValues = maybeBucketsValue.get(); + MlAggsHelper.DoubleBucketValues bucketValues = maybeBucketValues.get(); if (bucketValues.getValues().length < (2 * MINIMUM_BUCKETS) + 2) { return new InternalChangePointAggregation( name(), @@ -146,7 +146,7 @@ public InternalAggregation doReduce(InternalAggregations aggregations, Aggregati static ChangeType testForSpikeOrDip(MlAggsHelper.DoubleBucketValues bucketValues, double pValueThreshold) { try { SpikeAndDipDetector detect = new SpikeAndDipDetector(bucketValues.getValues()); - ChangeType result = detect.at(pValueThreshold); + ChangeType result = detect.at(pValueThreshold, bucketValues); logger.trace("spike or dip p-value: [{}]", result.pValue()); return result; } catch (NotStrictlyPositiveException nspe) { @@ -552,7 +552,7 @@ ChangeType changeType(MlAggsHelper.DoubleBucketValues bucketValues, double slope case TREND_CHANGE: return new ChangeType.TrendChange(pValueVsStationary(), rSquared(), bucketValues.getBucketIndex(changePoint)); case DISTRIBUTION_CHANGE: - return new ChangeType.DistributionChange(pValue, changePoint); + return new ChangeType.DistributionChange(pValue, bucketValues.getBucketIndex(changePoint)); } throw new RuntimeException("Unknown change type [" + type + "]."); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java index 8bbd793637ab3..b628ea3324cf1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ml.aggs.changepoint; +import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; + import java.util.Arrays; /** @@ -133,29 +135,29 @@ private double[] removeIf(ExcludedPredicate should, double[] values) { spikeTestKDE = new KDE(spikeKDEValues, 1.36); } - ChangeType at(double pValueThreshold) { + ChangeType at(double pValueThreshold, 
MlAggsHelper.DoubleBucketValues bucketValues) { if (dipIndex == -1 || spikeIndex == -1) { return new ChangeType.Indeterminable( "not enough buckets to check for dip or spike. Requires at least [3]; found [" + numValues + "]" ); } - KDE.ValueAndMagnitude dipLeftLeftTailTest = dipTestKDE.cdf(dipValue); + KDE.ValueAndMagnitude dipLeftTailTest = dipTestKDE.cdf(dipValue); KDE.ValueAndMagnitude spikeRightTailTest = spikeTestKDE.sf(spikeValue); - double dipPValue = dipLeftLeftTailTest.pValue(numValues); + double dipPValue = dipLeftTailTest.pValue(numValues); double spikePValue = spikeRightTailTest.pValue(numValues); if (dipPValue < pValueThreshold && spikePValue < pValueThreshold) { - if (dipLeftLeftTailTest.isMoreSignificant(spikeRightTailTest)) { - return new ChangeType.Dip(dipPValue, dipIndex); + if (dipLeftTailTest.isMoreSignificant(spikeRightTailTest)) { + return new ChangeType.Dip(dipPValue, bucketValues.getBucketIndex(dipIndex)); } - return new ChangeType.Spike(spikePValue, spikeIndex); + return new ChangeType.Spike(spikePValue, bucketValues.getBucketIndex(spikeIndex)); } if (dipPValue < pValueThreshold) { - return new ChangeType.Dip(dipPValue, dipIndex); + return new ChangeType.Dip(dipPValue, bucketValues.getBucketIndex(dipIndex)); } if (spikePValue < pValueThreshold) { - return new ChangeType.Spike(spikePValue, spikeIndex); + return new ChangeType.Spike(spikePValue, bucketValues.getBucketIndex(spikeIndex)); } return new ChangeType.Stationary(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 836c9a78f19d9..93dc8077196d7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -73,10 +73,8 @@ static Config fromBase64EncodedResource(String resourcePath) throws IOException private final int[] offsets; // The entire normalized bytes representations delimited by NULL private final byte[] normalizedStrUtf8Bytes; - // Continually reused to copy a single char into utf8 bytes - private final byte[] reusableCharByteBuffer = new byte[4]; // reusable char buffer for decoding utf8 bytes to determine char offset corrections - private final char[] reusableCharDecodeBuffer = new char[8]; + private final char[] reusableCharDecodeBuffer = new char[64]; private Reader transformedInput; public PrecompiledCharMapNormalizer(int[] offsets, String normalizedStr, Reader in) { @@ -172,7 +170,6 @@ Reader normalize(CharSequence str) { ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(str)); byte[] strBytes = new byte[byteBuffer.limit()]; byteBuffer.get(strBytes); - int[] strCp = str.codePoints().toArray(); BreakIterator b = BreakIterator.getCharacterInstance(Locale.ROOT); b.setText(str); // We iterate the whole string, so b.first() is always `0` diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java index 48b570e927b15..5a2f044d1f7be 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java @@ -13,6 +13,7 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.transport.Transports; import java.util.HashMap; import java.util.LinkedHashSet; @@ -75,6 +76,7 @@ public static int countInferenceProcessors(ClusterState state) { */ @SuppressWarnings("unchecked") public static Map> pipelineIdsByResource(ClusterState state, Set ids) { + assert Transports.assertNotTransportThread("non-trivial nested loops over cluster state structures"); Map> pipelineIdsByModelIds = new HashMap<>(); Metadata metadata = state.metadata(); if (metadata == null) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java index 5653af2a000f5..fe91aa3e6a600 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.aggs.changepoint; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; import java.util.Arrays; @@ -19,10 +20,13 @@ public class SpikeAndDipDetectorTests extends ESTestCase { public void testTooLittleData() { for (int i = 0; i < 4; i++) { + long[] docCounts = new long[i]; double[] values = new double[i]; + Arrays.fill(docCounts, 1); Arrays.fill(values, 1.0); + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - assertThat(detect.at(0.01), instanceOf(ChangeType.Indeterminable.class)); + assertThat(detect.at(0.01, bucketValues), instanceOf(ChangeType.Indeterminable.class)); } } @@ -142,24 +146,42 @@ public void testDetection() { // Check vs some expected values. 
{ + long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; double[] values = new double[] { 0.1, 3.1, 1.2, 1.7, 0.9, 2.3, -0.8, 3.2, 1.2, 1.3, 1.1, 1.0, 8.5, 0.5, 2.6, 0.7 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - ChangeType change = detect.at(0.05); + ChangeType change = detect.at(0.05, bucketValues); assertThat(change, instanceOf(ChangeType.Spike.class)); assertThat(change.pValue(), closeTo(3.0465e-12, 1e-15)); } { + long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; double[] values = new double[] { 0.1, 3.1, 1.2, 1.7, 0.9, 2.3, -4.2, 3.2, 1.2, 1.3, 1.1, 1.0, 3.5, 0.5, 2.6, 0.7 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - ChangeType change = detect.at(0.05); + ChangeType change = detect.at(0.05, bucketValues); assertThat(change, instanceOf(ChangeType.Dip.class)); assertThat(change.pValue(), closeTo(1.2589e-08, 1e-11)); } } + + public void testMissingBuckets() { + long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; + double[] values = new double[] { 1.0, 2.0, 0.7, 1.0, 1.5, 1.1, 2.2, 10.0, 0.3, 0.4, 0.7, 0.9, 1.4, 2.1, 1.2, 1.0 }; + int[] buckets = new int[] { 0, 2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 15, 17, 18, 19, 20 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values, buckets); + + SpikeAndDipDetector detect = new SpikeAndDipDetector(values); + + ChangeType change = detect.at(0.01, bucketValues); + + assertThat(change, instanceOf(ChangeType.Spike.class)); + assertThat(change.changePoint(), equalTo(10)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index 8d8cded819e23..25c5191afc218 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.persistent.UpdatePersistentTaskStatusAction; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -243,7 +244,11 @@ public void testPersistProgress_ProgressDocumentCreated() throws IOException { } public void testPersistProgress_ProgressDocumentUpdated() throws IOException { - var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistProgress(hits, ".ml-state-dummy"); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java index d265ae17ce6ea..78ee3e1d6e4fa 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -151,7 +152,7 @@ public void testInferTestDocs_GivenCancelWasCalled() { private static Deque buildSearchHits(List> vals) { return vals.stream().map(InferenceRunnerTests::fromMap).map(reference -> { - var pooled = SearchHit.createFromMap(Collections.singletonMap("_source", reference)); + var pooled = SearchResponseUtils.searchHitFromMap(Collections.singletonMap("_source", reference)); try { return pooled.asUnpooled(); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index d542b97eee192..eef9902d35e59 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -57,6 +57,11 @@ public void testEmoji() throws IOException { assertNormalization("😀", parsed, "😀"); } + public void testCharThatNormalizesToLongText() throws IOException { + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + assertNormalization("ﷺ", parsed, "صلى الله عليه وسلم"); + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index db81fc2db3348..654ce7bf965bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; @@ -358,7 +359,11 @@ public void testPersistQuantilesSync_QuantilesDocumentCreated() { } public void testPersistQuantilesSync_QuantilesDocumentUpdated() { - var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistQuantilesSync(hits, ".ml-state-dummy"); } finally { @@ -399,7 +404,11 @@ public void testPersistQuantilesAsync_QuantilesDocumentCreated() { } public void 
testPersistQuantilesAsync_QuantilesDocumentUpdated() { - var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistQuantilesAsync(hits, ".ml-state-dummy"); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java index a7ba148584637..64d0e2b835ffb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; @@ -124,7 +125,7 @@ public void testStateRead_StateDocumentCreated() throws IOException { public void testStateRead_StateDocumentUpdated() throws IOException { testStateRead( - SearchHits.unpooled(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), + SearchHits.unpooled(new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), ".ml-state-dummy" ); } diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 501d564bbda0d..62b8242e7df86 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -42,7 +42,6 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106308") public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception { BoolQueryBuilder query = QueryBuilders.boolQuery(); query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index 383f46c97f02f..58b018a13e096 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -143,6 +143,9 @@ protected final void doSetupData() throws Exception { bulkIndex("data/apm-legacy-test.ndjson"); refresh(); + + // temporary workaround for #106657, see also #106308. 
+ forceMerge(); } @After diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson index 58e8281e1d32c..e12a670a79d18 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson @@ -1,4 +1,4 @@ {"create": {"_index": "profiling-hosts","_id":"eLH27YsBj2lLi3tJYlvr"}} -{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"ec2.instance_type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"profiling.host.machine":"x86_64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"ec2.placement.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} 
+{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"host.type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"cloud.provider":"aws","cloud.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} {"create": {"_index": "profiling-hosts", "_id": "u_fHlYwBkmZvQ6tVo1Lr"}} 
-{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"azure.compute.vmsize":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.machine":"x86_64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","azure.compute.location":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} 
+{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"host.type":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","cloud.provider":"azure","cloud.region":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 454cd35b396b9..d69178f158a88 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -53,14 +53,13 @@ public double getAnnualCO2Tons(String hostID, long samples) { } private double getKiloWattsPerCore(HostMetadata host) { - if ("aarch64".equals(host.profilingHostMachine)) { - // Assume that AARCH64 (aka ARM64) machines are more energy efficient than x86_64 machines. - return customKilowattsPerCoreARM64; - } - if ("x86_64".equals(host.profilingHostMachine)) { - return customKilowattsPerCoreX86; - } - return DEFAULT_KILOWATTS_PER_CORE; + return switch (host.hostArchitecture) { + // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions, + // which require "arm64" and "amd64" to be reported as the host architecture. 
+ case "arm64", "aarch64" -> customKilowattsPerCoreARM64; + case "amd64", "x86_64" -> customKilowattsPerCoreX86; + default -> DEFAULT_KILOWATTS_PER_CORE; + }; } private double getCO2TonsPerKWH(HostMetadata host) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e9f912a3f60e5..e1e3e27e951bf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -20,22 +20,26 @@ final class HostMetadata implements ToXContentObject { static final int DEFAULT_PROFILING_NUM_CORES = 4; final String hostID; final InstanceType instanceType; - final String profilingHostMachine; // aarch64 or x86_64 + final String hostArchitecture; // arm64 or amd64, (pre-8.14.0: aarch64 or x86_64) final int profilingNumCores; // number of cores on the profiling host machine - HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine, Integer profilingNumCores) { + HostMetadata(String hostID, InstanceType instanceType, String hostArchitecture, Integer profilingNumCores) { this.hostID = hostID; this.instanceType = instanceType; - this.profilingHostMachine = profilingHostMachine; + this.hostArchitecture = hostArchitecture; this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); - String profilingHostMachine = (String) source.get("profiling.host.machine"); + String hostArchitecture = (String) source.get("host.arch"); + if (hostArchitecture == null) { + // fallback to pre-8.14.0 field name + hostArchitecture = (String) source.get("profiling.host.machine"); + } Integer profilingNumCores = (Integer) source.get("profiling.agent.config.present_cpu_cores"); - return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine, profilingNumCores); + return new HostMetadata(hostID, InstanceType.fromHostSource(source), hostArchitecture, profilingNumCores); } return new HostMetadata("", new InstanceType("", "", ""), "", null); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java index ee649e381c85d..3aa0a79df13bc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java @@ -35,6 +35,45 @@ final class InstanceType implements ToXContentObject { * @return the {@link InstanceType} */ public static InstanceType fromHostSource(Map source) { + String provider = (String) source.get("cloud.provider"); + if (provider != null) { + String region = (String) source.get("cloud.region"); + String instanceType = (String) source.get("host.type"); + return new InstanceType(provider, region, instanceType); + } + + // Check and handle pre-8.14.0 host sources for backwards-compatibility. + InstanceType instanceType = fromObsoleteHostSource(source); + if (instanceType != null) { + return instanceType; + } + + // Support for configured tags (ECS). 
+ // Example of tags: + // "profiling.host.tags": [ + // "cloud_provider:aws", + // "cloud_environment:qa", + // "cloud_region:eu-west-1", + // ], + String region = null; + List tags = listOf(source.get("profiling.host.tags")); + for (String tag : tags) { + String[] kv = tag.toLowerCase(Locale.ROOT).split(":", 2); + if (kv.length != 2) { + continue; + } + if ("cloud_provider".equals(kv[0])) { + provider = kv[1]; + } + if ("cloud_region".equals(kv[0])) { + region = kv[1]; + } + } + + return new InstanceType(provider, region, null); + } + + private static InstanceType fromObsoleteHostSource(Map source) { // Check and handle AWS. String region = (String) source.get("ec2.placement.region"); if (region != null) { @@ -67,30 +106,7 @@ public static InstanceType fromHostSource(Map source) { return new InstanceType("azure", region, instanceType); } - // Support for configured tags (ECS). - // Example of tags: - // "profiling.host.tags": [ - // "cloud_provider:aws", - // "cloud_environment:qa", - // "cloud_region:eu-west-1", - // ], - String provider = null; - region = null; - List tags = listOf(source.get("profiling.host.tags")); - for (String tag : tags) { - String[] kv = tag.toLowerCase(Locale.ROOT).split(":", 2); - if (kv.length != 2) { - continue; - } - if ("cloud_provider".equals(kv[0])) { - provider = kv[1]; - } - if ("cloud_region".equals(kv[0])) { - region = kv[1]; - } - } - - return new InstanceType(provider, region, null); + return null; } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index c90e0e52c4d58..e1698e71afab2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -46,13 +46,15 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 3: Add optional component template 'profiling-ilm@custom' to all ILM-managed index templates // version 4: Added 'service.name' keyword mapping to profiling-events // version 5: Add optional component template '@custom' to all index templates that reference component templates - public static final int INDEX_TEMPLATE_VERSION = 5; + // version 6: Added 'host.arch' keyword mapping to profiling-hosts + // version 7: Added 'host.type', 'cloud.provider', 'cloud.region' keyword mappings to profiling-hosts + public static final int INDEX_TEMPLATE_VERSION = 7; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 2; public static final int PROFILING_EXECUTABLES_VERSION = 1; public static final int PROFILING_METRICS_VERSION = 1; - public static final int PROFILING_HOSTS_VERSION = 1; + public static final int PROFILING_HOSTS_VERSION = 2; public static final int PROFILING_STACKFRAMES_VERSION = 1; public static final int PROFILING_STACKTRACES_VERSION = 1; public static final int PROFILING_SYMBOLS_VERSION = 1; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java index d8f93cd129916..5c24e295909bc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java @@ -15,16 +15,42 @@ public class HostMetadataTests extends ESTestCase { public void testCreateFromSourceAWS() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; final String provider = "aws"; final String region = "eu-west-1"; final String instanceType = "md5x.large"; // tag::noformat - HostMetadata host = HostMetadata.fromSource( - Map.of( + HostMetadata host = HostMetadata.fromSource ( + Map.of ( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, + "host.type", instanceType, + "cloud.provider", provider, + "cloud.region", region + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals(instanceType, host.instanceType.name); + } + + public void testCreateFromSourceAWSCompat() { + final String hostID = "1440256254710195396"; + final String arch = "x86_64"; + final String provider = "aws"; + final String region = "eu-west-1"; + final String instanceType = "md5x.large"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource ( + Map.of ( + "host.id", hostID, + "host.arch", arch, "ec2.instance_type", instanceType, "ec2.placement.region", region ) @@ -32,7 +58,7 @@ public void testCreateFromSourceAWS() { // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals(instanceType, host.instanceType.name); @@ -40,7 +66,33 @@ public void testCreateFromSourceAWS() { public void testCreateFromSourceGCP() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; + final String provider = "gcp"; + final String[] regions = { "", "", "europe-west1", "europewest", "europe-west1" }; + + for (String region : regions) { + // tag::noformat + HostMetadata host = HostMetadata.fromSource ( + Map.of ( + "host.id", hostID, + "host.arch", arch, + "cloud.provider", provider, + "cloud.region", region + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals("", host.instanceType.name); + } + } + + public void testCreateFromSourceGCPCompat() { + final String hostID = 
"1440256254710195396"; + final String arch = "x86_64"; final String provider = "gcp"; final String[] regions = { "", "", "europe-west1", "europewest", "europe-west1" }; final String[] zones = { @@ -58,14 +110,14 @@ public void testCreateFromSourceGCP() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "gce.instance.zone", zone ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals("", host.instanceType.name); @@ -74,7 +126,7 @@ public void testCreateFromSourceGCP() { public void testCreateFromSourceGCPZoneFuzzer() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "x86_64"; final String provider = "gcp"; final Character[] chars = new Character[] { '/', '-', 'a' }; @@ -92,14 +144,14 @@ public void testCreateFromSourceGCPZoneFuzzer() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "gce.instance.zone", zone ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertNotNull(host.instanceType.region); assertEquals("", host.instanceType.name); @@ -110,7 +162,7 @@ public void testCreateFromSourceGCPZoneFuzzer() { public void testCreateFromSourceAzure() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; final String provider = "azure"; final String region = "eastus2"; @@ -118,14 +170,14 @@ public void testCreateFromSourceAzure() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "azure.compute.location", region ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals("", host.instanceType.name); @@ -133,7 +185,7 @@ public void testCreateFromSourceAzure() { public void testCreateFromSourceECS() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; final String provider = "any-provider"; final String region = "any-region"; @@ -141,26 +193,47 @@ public void testCreateFromSourceECS() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, - "profiling.host.tags", Arrays.asList( - "cloud_provider:"+provider, "cloud_environment:qa", "cloud_region:"+region) + "host.arch", arch, + "profiling.host.tags", Arrays.asList ( + "cloud_provider:" + provider, "cloud_environment:qa", "cloud_region:" + region) ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals("", host.instanceType.name); } public void testCreateFromSourceNoProvider() { + final String hostID = "1440256254710195396"; + final String arch = "amd64"; + + // tag::noformat + HostMetadata host = 
HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "host.arch", arch + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals("", host.instanceType.provider); + assertEquals("", host.instanceType.region); + assertEquals("", host.instanceType.name); + } + + public void testCreateFromSourceArchitectureFallback() { final String hostID = "1440256254710195396"; final String machine = "x86_64"; // tag::noformat HostMetadata host = HostMetadata.fromSource( + // Missing host.arch field, pre-8.14.0 architecture value Map.of( "host.id", hostID, "profiling.host.machine", machine @@ -169,7 +242,7 @@ public void testCreateFromSourceNoProvider() { // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(machine, host.hostArchitecture); assertEquals("", host.instanceType.provider); assertEquals("", host.instanceType.region); assertEquals("", host.instanceType.name); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index 32bd76cf84e19..05e4d59843a10 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -108,7 +108,19 @@ public BytesRef wktToWkb(String wkt) { } } + public Geometry wktToGeometry(String wkt) { + try { + return WellKnownText.fromWKT(GeometryValidator.NOOP, false, wkt); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); + } + } + public String wkbToWkt(BytesRef wkb) { return WellKnownText.fromWKB(wkb.bytes, wkb.offset, wkb.length); } + + public Geometry wkbToGeometry(BytesRef wkb) { + return WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + } } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java index 1cab7dd87195b..bc7e0b2a93bf5 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.optimizer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.TestUtils; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -60,7 +61,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.StringUtils; import java.time.ZoneId; @@ -69,7 +69,6 @@ import static java.util.Arrays.asList; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.ql.TestUtils.equalsOf; import static org.elasticsearch.xpack.ql.TestUtils.fieldAttribute; @@ -157,15 +156,7 @@ private static Literal L(Object value) { } private static FieldAttribute getFieldAttribute() { - return getFieldAttribute("a"); - } - - private static 
FieldAttribute getFieldAttribute(String name) { - return getFieldAttribute(name, INTEGER); - } - - private static FieldAttribute getFieldAttribute(String name, DataType dataType) { - return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true)); + return TestUtils.getFieldAttribute("a"); } // @@ -548,9 +539,9 @@ public void testCombineComparisonsIntoRange() { // 1 < a AND a < 3 AND 2 < b AND b < 4 AND c < 4 -> (1 < a < 3) AND (2 < b < 4) AND c < 4 public void testCombineMultipleComparisonsIntoRange() { - FieldAttribute fa = getFieldAttribute("a"); - FieldAttribute fb = getFieldAttribute("b"); - FieldAttribute fc = getFieldAttribute("c"); + FieldAttribute fa = TestUtils.getFieldAttribute("a"); + FieldAttribute fb = TestUtils.getFieldAttribute("b"); + FieldAttribute fc = TestUtils.getFieldAttribute("c"); ZoneId zoneId = randomZone(); GreaterThan agt1 = new GreaterThan(EMPTY, fa, ONE, zoneId); @@ -1040,9 +1031,9 @@ public void testCombineBinaryComparisonsWithDifferentUpperLimitInclusion() { // (a = 1 AND b = 3 AND c = 4) OR (a = 2 AND b = 3 AND c = 4) -> (b = 3 AND c = 4) AND (a = 1 OR a = 2) public void testBooleanSimplificationCommonExpressionSubstraction() { - FieldAttribute fa = getFieldAttribute("a"); - FieldAttribute fb = getFieldAttribute("b"); - FieldAttribute fc = getFieldAttribute("c"); + FieldAttribute fa = TestUtils.getFieldAttribute("a"); + FieldAttribute fb = TestUtils.getFieldAttribute("b"); + FieldAttribute fc = TestUtils.getFieldAttribute("c"); Expression a1 = equalsOf(fa, ONE); Expression a2 = equalsOf(fa, TWO); @@ -1414,7 +1405,7 @@ public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { // a == 1 AND a == 2 -> nop for date/time fields public void testPropagateEquals_ignoreDateTimeFields() { - FieldAttribute fa = getFieldAttribute("a", DataTypes.DATETIME); + FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); Equals eq1 = equalsOf(fa, ONE); Equals eq2 = equalsOf(fa, TWO); And and = new And(EMPTY, eq1, eq2); @@ -1564,8 +1555,8 @@ public void testEqualsBehindAnd() throws Exception { } public void testTwoEqualsDifferentFields() throws Exception { - FieldAttribute fieldOne = getFieldAttribute("ONE"); - FieldAttribute fieldTwo = getFieldAttribute("TWO"); + FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); + FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); Expression e = new CombineDisjunctionsToIn().rule(or); @@ -1670,7 +1661,11 @@ public void testIsNullAndMultipleComparison() throws Exception { FieldAttribute fa = getFieldAttribute(); IsNull isNull = new IsNull(EMPTY, fa); - And nestedAnd = new And(EMPTY, lessThanOf(getFieldAttribute("b"), ONE), lessThanOf(getFieldAttribute("c"), ONE)); + And nestedAnd = new And( + EMPTY, + lessThanOf(TestUtils.getFieldAttribute("b"), ONE), + lessThanOf(TestUtils.getFieldAttribute("c"), ONE) + ); And and = new And(EMPTY, isNull, nestedAnd); And top = new And(EMPTY, and, lessThanOf(fa, ONE)); @@ -1689,7 +1684,7 @@ public void testIsNullAndDeeplyNestedExpression() throws Exception { greaterThanOf(new Div(EMPTY, new Add(EMPTY, fa, ONE), TWO), ONE), greaterThanOf(new Add(EMPTY, fa, TWO), ONE) ); - Expression kept = new And(EMPTY, isNull, lessThanOf(getFieldAttribute("b"), THREE)); + Expression kept = new And(EMPTY, isNull, lessThanOf(TestUtils.getFieldAttribute("b"), THREE)); And and = new And(EMPTY, nullified, kept); Expression optimized = new PropagateNullable().rule(and); @@ 
-1729,8 +1724,8 @@ public void testIsNullDisjunction() throws Exception { public void testCombineFilters() throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); Filter fa = new Filter(EMPTY, relation, conditionA); Filter fb = new Filter(EMPTY, fa, conditionB); @@ -1740,11 +1735,11 @@ public void testCombineFilters() throws Exception { public void testPushDownFilter() throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); Filter fa = new Filter(EMPTY, relation, conditionA); - List projections = singletonList(getFieldAttribute("b")); + List projections = singletonList(TestUtils.getFieldAttribute("b")); Project project = new Project(EMPTY, fa, projections); Filter fb = new Filter(EMPTY, project, conditionB); @@ -1754,12 +1749,12 @@ public void testPushDownFilter() throws Exception { public void testPushDownFilterThroughAgg() throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); GreaterThanOrEqual aggregateCondition = greaterThanOrEqualOf(new Count(EMPTY, ONE, false), THREE); Filter fa = new Filter(EMPTY, relation, conditionA); - List projections = singletonList(getFieldAttribute("b")); + List projections = singletonList(TestUtils.getFieldAttribute("b")); // invalid aggregate but that's fine cause its properties are not used by this rule Aggregate aggregate = new Aggregate(EMPTY, fa, emptyList(), emptyList()); Filter fb = new Filter(EMPTY, aggregate, new And(EMPTY, aggregateCondition, conditionB)); @@ -1773,7 +1768,7 @@ public void testPushDownFilterThroughAgg() throws Exception { public void testIsNotNullOnIsNullField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); Expression inn = isNotNull(fieldA); Filter f = new Filter(EMPTY, relation, inn); @@ -1782,7 +1777,7 @@ public void testIsNotNullOnIsNullField() { public void testIsNotNullOnOperatorWithOneField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); Expression inn = isNotNull(new Add(EMPTY, fieldA, ONE)); Filter f = new Filter(EMPTY, relation, inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); @@ -1792,8 +1787,8 @@ public void testIsNotNullOnOperatorWithOneField() { public void testIsNotNullOnOperatorWithTwoFields() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); - var fieldB = getFieldAttribute("b"); + var fieldA = TestUtils.getFieldAttribute("a"); + var fieldB = TestUtils.getFieldAttribute("b"); Expression inn = isNotNull(new Add(EMPTY, fieldA, fieldB)); Filter f = new Filter(EMPTY, relation, inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new 
And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); @@ -1803,7 +1798,7 @@ public void testIsNotNullOnOperatorWithTwoFields() { public void testIsNotNullOnFunctionWithOneField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); var pattern = L("abc"); Expression inn = isNotNull( new And(EMPTY, new TestStartsWith(EMPTY, fieldA, pattern, false), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE)) @@ -1817,8 +1812,8 @@ public void testIsNotNullOnFunctionWithOneField() { public void testIsNotNullOnFunctionWithTwoFields() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); - var fieldB = getFieldAttribute("b"); + var fieldA = TestUtils.getFieldAttribute("a"); + var fieldB = TestUtils.getFieldAttribute("b"); var pattern = L("abc"); Expression inn = isNotNull(new TestStartsWith(EMPTY, fieldA, fieldB, false)); diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java index 8ff7cac54bf5c..3159c6ea41547 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java @@ -82,6 +82,7 @@ import static org.elasticsearch.test.ESTestCase.randomZone; import static org.elasticsearch.xpack.ql.TestUtils.StringContainsRegex.containsRegex; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; @@ -445,6 +446,14 @@ else if (query == null) { return arr; } + public static FieldAttribute getFieldAttribute(String name) { + return getFieldAttribute(name, INTEGER); + } + + public static FieldAttribute getFieldAttribute(String name, DataType dataType) { + return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true)); + } + // Matcher which extends the functionality of org.hamcrest.Matchers.matchesPattern(String)} // by allowing to match detected regex groups later on in the pattern, e.g.: // "(?.+?)"....... \k....."} diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle index 94aa196f8e8e1..333364c6167c0 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle @@ -5,28 +5,17 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.ports.ReservedPortRange - -import java.nio.file.Files -import java.nio.file.Paths - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.internal-available-ports' -final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture') -final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture') -final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') - dependencies { + clusterPlugins project(':plugins:repository-hdfs') javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) - javaRestTestImplementation hdfsRepoPluginProject + javaRestTestImplementation project(path: ':test:fixtures:hdfs-fixture', configuration:"shadowedHdfs2") + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" } restResources { @@ -35,152 +24,7 @@ restResources { } } -testFixtures.useFixture(krbFixtureProject.path, 'hdfs-snapshot') - -configurations { - hdfsFixture -} - -dependencies { - hdfsFixture hdfsFixtureProject - // Set the keytab files in the classpath so that we can access them from test code without the security manager freaking out. - if (isEclipse == false) { - javaRestTestRuntimeOnly files(krbFixtureProject.ext.krb5Keytabs("hdfs-snapshot", "hdfs_hdfs.build.elastic.co.keytab").parent){ - builtBy ":test:fixtures:krb5kdc-fixture:preProcessFixture" - } - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } -} - -String realm = "BUILD.ELASTIC.CO" -String krb5conf = krbFixtureProject.ext.krb5Conf("hdfs") - -// Create HDFS File System Testing Fixtures -for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - dependsOn project.configurations.hdfsFixture, krbFixtureProject.tasks.postProcessFixture - executable = "${BuildParams.runtimeJavaHome}/bin/java" - env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}") - } - // configure port dynamically - def portRange = project.getExtensions().getByType(ReservedPortRange) - miniHDFSArgs.add("-Dhdfs.config.port=${portRange.getOrAllocate(name)}") - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - 
- // If it's a secure fixture, then set the principal name and keytab locations to use for auth. - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")) - } - - args miniHDFSArgs.toArray() - } -} - -// Disable integration test if Fips mode tasks.named("javaRestTest").configure { - description = "Runs rest tests against an elasticsearch cluster with HDFS." - def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("hdfsFixture") - systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/searchable_snapshots/simple' - BuildParams.withFipsEnabledOnly(it) -} - -tasks.register("javaRestTestSecure", RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with Secured HDFS." - def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("secureHdfsFixture") - nonInputProperties.systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/searchable_snapshots/secure' - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") - ) - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - classpath = sourceSets.javaRestTest.runtimeClasspath - BuildParams.withFipsEnabledOnly(it) -} -tasks.named("check").configure { dependsOn("javaRestTestSecure") } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - plugin(hdfsRepoPluginProject.path) - setting 'xpack.license.self_generated.type', 'trial' - - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - - setting 'xpack.security.enabled', 'false' -} - -testClusters.matching { it.name == "javaRestTestSecure" }.configureEach { - systemProperty "java.security.krb5.conf", krb5conf - jvmArgs "--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED" - extraConfigFile( - "repository-hdfs/krb5.keytab", - file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE - ) -} - -// Determine HDFS Fixture compatibility for the current build environment. 
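For reference, and judging by the comments in the removed block, the compatibility gate removed below amounted to: the legacy MiniHDFS fixture ran unconditionally on non-Windows hosts, ran on Windows only when HADOOP_HOME pointed at the Hadoop native binaries, and never ran when the workspace path contained spaces. A minimal Java sketch of that check, for illustration only (the class and method names are ours; the original logic lives in this Gradle script):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    final class HdfsFixtureSupport {
        // Mirrors the removed Gradle check: spaces in the workspace path disable the
        // fixture, non-Windows hosts always qualify, and Windows additionally needs
        // winutils.exe, hadoop.dll and hdfs.dll under $HADOOP_HOME/bin.
        static boolean isSupported(boolean isWindows, String hadoopHome, String workspacePath) {
            if (workspacePath.contains(" ")) {
                return false;
            }
            if (isWindows == false) {
                return true;
            }
            if (hadoopHome == null) {
                return false;
            }
            Path bin = Paths.get(hadoopHome).resolve("bin");
            return Files.exists(bin.resolve("winutils.exe"))
                && Files.exists(bin.resolve("hadoop.dll"))
                && Files.exists(bin.resolve("hdfs.dll"));
        }
    }

With the switch to the testcontainers-backed HdfsFixture and ElasticsearchCluster JUnit rules shown further down, this environment probing is no longer needed in the build script.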
-boolean fixtureSupported = false -if (OS.current() != OS.WINDOWS) { - // hdfs fixture will not start without hadoop native libraries on windows - String nativePath = System.getenv("HADOOP_HOME") - if (nativePath != null) { - java.nio.file.Path path = Paths.get(nativePath) - if (Files.isDirectory(path) && - Files.exists(path.resolve("bin").resolve("winutils.exe")) && - Files.exists(path.resolve("bin").resolve("hadoop.dll")) && - Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { - fixtureSupported = true - } else { - throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin") - } - } -} else { - fixtureSupported = true -} - -boolean legalPath = rootProject.rootDir.toString().contains(" ") == false -if (legalPath == false) { - fixtureSupported = false -} - -if (fixtureSupported) { - tasks.named("javaRestTest").configure {dependsOn "hdfsFixture" } - tasks.named("javaRestTestSecure").configure {dependsOn "secureHdfsFixture" } -} else { - tasks.named("javaRestTest").configure {enabled = false } - tasks.named("javaRestTestSecure").configure { enabled = false } - if (legalPath) { - logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") - } else { - logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'") - } + usesDefaultDistribution() + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java index 515be1adccc9b..b8ace1d32d0f7 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java @@ -7,13 +7,39 @@ package org.elasticsearch.xpack.searchablesnapshots.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; - +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) public class HdfsSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + 
public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override protected String writeRepositoryType() { return "hdfs"; @@ -21,19 +47,9 @@ protected String writeRepositoryType() { @Override protected Settings writeRepositorySettings() { - final String uri = System.getProperty("test.hdfs.uri"); - assertThat(uri, not(blankOrNullString())); - - final String path = System.getProperty("test.hdfs.path"); - assertThat(path, not(blankOrNullString())); - - // Optional based on type of test - final String principal = System.getProperty("test.krb5.principal.es"); - + final String uri = "hdfs://localhost:" + hdfsFixture.getPort(); + final String path = "/user/elasticsearch/test/searchable_snapshots/simple"; Settings.Builder repositorySettings = Settings.builder().put("client", "searchable_snapshots").put("uri", uri).put("path", path); - if (principal != null) { - repositorySettings.put("security.principal", principal); - } return repositorySettings.build(); } } diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java new file mode 100644 index 0000000000000..cf30fae9861ff --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class SecureHdfsSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String writeRepositoryType() { + return "hdfs"; + } + + @Override + protected Settings writeRepositorySettings() { + final String uri = "hdfs://localhost:" + hdfsFixture.getPort(); + final String path = "/user/elasticsearch/test/searchable_snapshots/secure"; + Settings.Builder repositorySettings = Settings.builder().put("client", "searchable_snapshots").put("uri", uri).put("path", path); + + final String principal = "elasticsearch@BUILD.ELASTIC.CO"; + repositorySettings.put("security.principal", principal); + return repositorySettings.build(); + } + +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index fee5129f8c9b8..ccf9d66a5bc21 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -47,7 +47,7 @@ public class RemoteClusterSecurityBwcRestIT extends AbstractRemoteClusterSecurit static { fulfillingCluster = ElasticsearchCluster.local() .version(OLD_CLUSTER_VERSION) - .distribution(DistributionType.INTEG_TEST) + 
.distribution(DistributionType.DEFAULT) .name("fulfilling-cluster") .apply(commonClusterConfig) .setting("xpack.ml.enabled", "false") diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 850dfe5dffa99..08bca3ffdaeea 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -1824,9 +1824,12 @@ private void expectMetadata(final String apiKeyId, final Map exp assertOK(response); try (XContentParser parser = responseAsParser(response)) { final var apiKeyResponse = GetApiKeyResponse.fromXContent(parser); - assertThat(apiKeyResponse.getApiKeyInfos().length, equalTo(1)); + assertThat(apiKeyResponse.getApiKeyInfoList().size(), equalTo(1)); // ApiKey metadata is set to empty Map if null - assertThat(apiKeyResponse.getApiKeyInfos()[0].getMetadata(), equalTo(expectedMetadata == null ? Map.of() : expectedMetadata)); + assertThat( + apiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getMetadata(), + equalTo(expectedMetadata == null ? Map.of() : expectedMetadata) + ); } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java index e9dc00acf3211..a321a5a758bf3 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java @@ -27,18 +27,17 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import javax.annotation.Nullable; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -103,7 +102,7 @@ public void testGetApiKeysWithActiveOnlyFlag() throws Exception { // We get an empty result when no API keys active getSecurityClient().invalidateApiKeys(apiKeyId1); - assertThat(getApiKeysWithRequestParams(Map.of("active_only", "true")).getApiKeyInfos(), emptyArray()); + assertThat(getApiKeysWithRequestParams(Map.of("active_only", "true")).getApiKeyInfoList(), emptyIterable()); { // Using together with id parameter, returns 404 for inactive key @@ -166,12 +165,12 @@ public void testGetApiKeysWithActiveOnlyFlagAndMultipleUsers() throws Exception manageApiKeyUserApiKeyId ); assertThat( - getApiKeysWithRequestParams(Map.of("active_only", "true", "username", MANAGE_OWN_API_KEY_USER)).getApiKeyInfos(), - emptyArray() + getApiKeysWithRequestParams(Map.of("active_only", "true", "username", MANAGE_OWN_API_KEY_USER)).getApiKeyInfoList(), + emptyIterable() ); assertThat( - getApiKeysWithRequestParams(MANAGE_OWN_API_KEY_USER, Map.of("active_only", 
"true", "owner", "true")).getApiKeyInfos(), - emptyArray() + getApiKeysWithRequestParams(MANAGE_OWN_API_KEY_USER, Map.of("active_only", "true", "owner", "true")).getApiKeyInfoList(), + emptyIterable() ); // No more active API keys @@ -180,15 +179,15 @@ public void testGetApiKeysWithActiveOnlyFlagAndMultipleUsers() throws Exception assertThat( getApiKeysWithRequestParams( Map.of("active_only", "true", "username", randomFrom(MANAGE_SECURITY_USER, MANAGE_OWN_API_KEY_USER)) - ).getApiKeyInfos(), - emptyArray() + ).getApiKeyInfoList(), + emptyIterable() ); assertThat( getApiKeysWithRequestParams( randomFrom(MANAGE_SECURITY_USER, MANAGE_OWN_API_KEY_USER), Map.of("active_only", "true", "owner", "true") - ).getApiKeyInfos(), - emptyArray() + ).getApiKeyInfoList(), + emptyIterable() ); // With flag set to false, we get both inactive keys assertResponseContainsApiKeyIds( @@ -205,8 +204,8 @@ public void testInvalidateApiKey() throws Exception { setUserForRequest(request, MANAGE_SECURITY_USER); GetApiKeyResponse getApiKeyResponse = GetApiKeyResponse.fromXContent(getParser(client().performRequest(request))); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - ApiKey apiKey = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey apiKey = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKey.isInvalidated(), equalTo(false)); assertThat(apiKey.getInvalidation(), nullValue()); assertThat(apiKey.getId(), equalTo(apiKeyId0)); @@ -226,8 +225,8 @@ public void testInvalidateApiKey() throws Exception { setUserForRequest(request, MANAGE_SECURITY_USER); getApiKeyResponse = GetApiKeyResponse.fromXContent(getParser(client().performRequest(request))); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - apiKey = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + apiKey = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKey.isInvalidated(), equalTo(true)); assertThat(apiKey.getInvalidation(), notNullValue()); assertThat(apiKey.getId(), equalTo(apiKeyId0)); @@ -245,7 +244,10 @@ private GetApiKeyResponse getApiKeysWithRequestParams(String userOnRequest, Map< } private static void assertResponseContainsApiKeyIds(GetApiKeyResponse response, String... 
ids) { - assertThat(Arrays.stream(response.getApiKeyInfos()).map(ApiKey::getId).collect(Collectors.toList()), containsInAnyOrder(ids)); + assertThat( + response.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).map(ApiKey::getId).toList(), + containsInAnyOrder(ids) + ); } private static XContentParser getParser(Response response) throws IOException { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 83be62beab4ec..591b20bd82f47 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -48,6 +49,7 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.InternalSettingsPlugin; @@ -68,6 +70,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; @@ -77,6 +80,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; @@ -137,6 +142,9 @@ protected String configUsers() { + "\n" + "user9:" + usersPasswHashed + + "\n" + + "user_different_fields:" + + usersPasswHashed + "\n"; } @@ -150,7 +158,8 @@ protected String configUsersRoles() { role5:user4,user7 role6:user5,user7 role7:user6 - role8:user9"""; + role8:user9 + role_different_fields:user_different_fields"""; } @Override @@ -213,6 +222,16 @@ protected String configRoles() { privileges: [ ALL ] field_security: grant: [ 'field*', 'query' ] + role_different_fields: + indices: + - names: [ 'partial1*' ] + privileges: [ 'read' ] + field_security: + grant: [ value, partial ] + - names: [ 'partial2*' ] + privileges: [ 'read' ] + field_security: + grant: [ value ] """; } @@ -2336,4 +2355,49 @@ public void testLookupRuntimeFields() throws Exception { ); } + public void testSearchDifferentFieldsVisible() { + String firstName = "partial1" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + String secondName = "partial2" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + 
indexPartial(firstName, secondName); + SearchResponse response = client().filterWithHeader( + Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_different_fields", USERS_PASSWD)) + ).prepareSearch("partial*").addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)).get(); + try { + assertMap(response.getHits().getAt(0).getSourceAsMap(), matchesMap().entry("value", 1).entry("partial", 2)); + assertMap(response.getHits().getAt(1).getSourceAsMap(), matchesMap().entry("value", 2)); + } finally { + response.decRef(); + } + } + + /** + * The fields {@code partial} is only visible in one of the two backing indices and field caps should show it. + */ + public void testFieldCapsDifferentFieldsVisible() { + String firstName = "partial1_" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + String secondName = "partial2_" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + indexPartial(firstName, secondName); + FieldCapabilitiesResponse response = client().filterWithHeader( + Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_different_fields", USERS_PASSWD)) + ).prepareFieldCaps("partial*").setFields("value", "partial").get(); + try { + assertThat(response.get().keySet(), equalTo(Set.of("value", "partial"))); + assertThat(response.getField("value").keySet(), equalTo(Set.of("long"))); + assertThat(response.getField("partial").keySet(), equalTo(Set.of("long"))); + } finally { + response.decRef(); + } + } + + private void indexPartial(String firstName, String secondName) { + BulkResponse bulkResponse = client().prepareBulk() + .add(client().prepareIndex(firstName).setSource("value", 1, "partial", 2)) + .add(client().prepareIndex(secondName).setSource("value", 2, "partial", 3)) + .setRefreshPolicy(IMMEDIATE) + .get(); + for (var i : bulkResponse.getItems()) { + assertThat(i.getFailure(), nullValue()); + assertThat(i.status(), equalTo(RestStatus.CREATED)); + } + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 5972d2b2c070f..f00b0b41c2fa7 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -137,7 +137,6 @@ import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -149,6 +148,7 @@ import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -399,7 +399,7 @@ public void testInvalidateApiKeysForApiKeyName() throws InterruptedException, Ex public void testInvalidateApiKeyWillClearApiKeyCache() throws IOException, ExecutionException, InterruptedException { final List services = Arrays.stream(internalCluster().getNodeNames()) .map(n -> internalCluster().getInstance(ApiKeyService.class, n)) - 
.collect(Collectors.toList()); + .toList(); // Create two API keys and authenticate with them Tuple apiKey1 = createApiKeyAndAuthenticateWithIt(); @@ -471,7 +471,7 @@ public void testDynamicDeletionInterval() throws Exception { refreshSecurityIndex(); // Get API keys to make sure remover didn't remove any yet - assertThat(getAllApiKeyInfo(client, false).length, equalTo(3)); + assertThat(getAllApiKeyInfo(client, false).size(), equalTo(3)); // Invalidate another key listener = new PlainActionFuture<>(); @@ -481,7 +481,7 @@ public void testDynamicDeletionInterval() throws Exception { refreshSecurityIndex(); // Get API keys to make sure remover didn't remove any yet (shouldn't be removed because of the long DELETE_INTERVAL) - assertThat(getAllApiKeyInfo(client, false).length, equalTo(3)); + assertThat(getAllApiKeyInfo(client, false).size(), equalTo(3)); // Update DELETE_INTERVAL to every 0 ms builder = Settings.builder(); @@ -499,7 +499,7 @@ public void testDynamicDeletionInterval() throws Exception { // Make sure all keys except the last invalidated one are deleted // There is a (tiny) risk that the remover runs after the invalidation and therefore deletes the key that was just // invalidated, so 0 or 1 keys can be returned from the get api - assertThat(getAllApiKeyInfo(client, false).length, in(Set.of(0, 1))); + assertThat(getAllApiKeyInfo(client, false).size(), in(Set.of(0, 1))); } finally { final Settings.Builder builder = Settings.builder(); builder.putNull(ApiKeyService.DELETE_INTERVAL.getKey()); @@ -516,7 +516,7 @@ private void verifyInvalidateResponse( assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(noOfApiKeys)); assertThat( invalidateResponse.getInvalidatedApiKeys(), - containsInAnyOrder(responses.stream().map(r -> r.getId()).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)) + containsInAnyOrder(responses.stream().map(CreateApiKeyResponse::getId).toArray(String[]::new)) ); assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0)); assertThat(invalidateResponse.getErrors().size(), equalTo(0)); @@ -588,7 +588,11 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr // The first API key with 1ms expiration should already be deleted Set expectedKeyIds = Sets.newHashSet(nonExpiringKey.getId(), createdApiKeys.get(0).getId(), createdApiKeys.get(1).getId()); boolean apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false; - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(nonExpiringKey.getId())) { assertThat(apiKey.isInvalidated(), is(false)); @@ -603,7 +607,7 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr } } assertThat( - getApiKeyResponseListener.get().getApiKeyInfos().length, + getApiKeyResponseListener.get().getApiKeyInfoList().size(), is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 
3 : 2) ); @@ -633,7 +637,11 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr ); expectedKeyIds = Sets.newHashSet(nonExpiringKey.getId(), createdApiKeys.get(1).getId()); apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false; - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(nonExpiringKey.getId())) { assertThat(apiKey.isInvalidated(), is(false)); @@ -645,7 +653,7 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr } } assertThat( - getApiKeyResponseListener.get().getApiKeyInfos().length, + getApiKeyResponseListener.get().getApiKeyInfoList().size(), is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 2 : 1) ); } @@ -684,7 +692,7 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP GetApiKeyRequest.builder().apiKeyName(namePrefix + "*").build(), getApiKeyResponseListener ); - assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length, is(noOfKeys)); + assertThat(getApiKeyResponseListener.get().getApiKeyInfoList().size(), is(noOfKeys)); // Expire the 1st key such that it cannot be deleted by the remover // hack doc to modify the expiration time @@ -783,7 +791,11 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP createdApiKeys.get(7).getId(), createdApiKeys.get(8).getId() ); - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(createdApiKeys.get(0).getId())) { // has been expired, not invalidated @@ -805,7 +817,7 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP fail("unexpected API key " + apiKey); } } - assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length, is(4)); + assertThat(getApiKeyResponseListener.get().getApiKeyInfoList().size(), is(4)); } private void refreshSecurityIndex() throws Exception { @@ -842,7 +854,7 @@ public void testActiveApiKeysWithNoExpirationNeverGetDeletedByRemover() throws E tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), Collections.singletonList(responses.get(1).getId()) ); @@ -888,7 +900,7 @@ public void testGetApiKeysForRealm() throws InterruptedException, ExecutionExcep tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), expectedValidKeyIds, invalidatedApiKeyIds ); @@ -913,7 +925,7 @@ public void testGetApiKeysForUser() throws Exception { tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), responses.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -937,7 +949,7 @@ public void testGetApiKeysForRealmAndUser() throws InterruptedException, Executi tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? 
List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -961,7 +973,7 @@ public void testGetApiKeysForApiKeyId() throws InterruptedException, ExecutionEx tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1003,7 +1015,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution metadatas, List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener.get().getApiKeyInfos(), + listener.get().getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1020,7 +1032,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution tuple1.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener2.get().getApiKeyInfos(), + listener2.get().getApiKeyInfoList(), createApiKeyResponses1.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1043,7 +1055,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution metadatas, List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener3.get().getApiKeyInfos(), + listener3.get().getApiKeyInfoList(), responses.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1060,7 +1072,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution null, List.of(), List.of(), - listener4.get().getApiKeyInfos(), + listener4.get().getApiKeyInfoList(), Collections.emptySet(), null ); @@ -1077,7 +1089,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution tuple2.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener5.get().getApiKeyInfos(), + listener5.get().getApiKeyInfoList(), createApiKeyResponses2.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1121,7 +1133,6 @@ public void testGetApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedEx GetApiKeyRequest.builder().ownedByAuthenticatedUser().withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( userWithManageApiKeyRole, noOfApiKeysForUserWithManageApiKeyRole, @@ -1129,7 +1140,7 @@ public void testGetApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedEx tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), expectedLimitedByRoleDescriptors, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1155,7 +1166,6 @@ public void testGetApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws ExecutionExce GetApiKeyRequest.builder().ownedByAuthenticatedUser().withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( "user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, @@ -1165,7 +1175,7 @@ public void testGetApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws ExecutionExce withLimitedBy ? 
List.of(new RoleDescriptor("manage_own_api_key_role", new String[] { "manage_own_api_key" }, null, null)) : null, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1191,7 +1201,6 @@ public void testGetApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws Exec GetApiKeyRequest.builder().realmName("file").userName("user_with_manage_own_api_key_role").withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( "user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, @@ -1201,7 +1210,7 @@ public void testGetApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws Exec withLimitedBy ? List.of(new RoleDescriptor("manage_own_api_key_role", new String[] { "manage_own_api_key" }, null, null)) : null, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1451,7 +1460,7 @@ public void testApiKeyAuthorizationApiKeyMustBeAbleToRetrieveItsOwnInformationBu tuple.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), Strings.EMPTY_ARRAY, null, null)), null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1503,12 +1512,13 @@ public void testApiKeyViewLimitedBy() { // Can view itself without limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses1, tuple1.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_own_api_key" }, null, null)), null, - new ApiKey[] { getApiKeyInfo(client1, apiKeyId1, false, randomBoolean()) }, + List.of(getApiKeyInfo(client1, apiKeyId1, false, randomBoolean())), Collections.singleton(apiKeyId1), null ); @@ -1538,24 +1548,26 @@ public void testApiKeyViewLimitedBy() { // View its own limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses3, tuple3.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_api_key" }, null, null)), List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR), - new ApiKey[] { getApiKeyInfo(client3, apiKeyId3, true, randomBoolean()) }, + List.of(getApiKeyInfo(client3, apiKeyId3, true, randomBoolean())), Collections.singleton(apiKeyId3), null ); // View other key's limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses1, tuple1.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_own_api_key" }, null, null)), List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR), - new ApiKey[] { getApiKeyInfo(client3, apiKeyId1, true, randomBoolean()) }, + List.of(getApiKeyInfo(client3, apiKeyId1, true, randomBoolean())), Collections.singleton(apiKeyId1), null ); @@ -1721,8 +1733,8 @@ public void testDerivedKeys() throws ExecutionException, InterruptedException { GetApiKeyRequest.builder().apiKeyId(key100Response.getId()).withLimitedBy().build(), future ); - assertThat(future.actionGet().getApiKeyInfos().length, equalTo(1)); - final RoleDescriptorsIntersection limitedBy = future.actionGet().getApiKeyInfos()[0].getLimitedBy(); + assertThat(future.actionGet().getApiKeyInfoList().size(), equalTo(1)); + RoleDescriptorsIntersection limitedBy = future.actionGet().getApiKeyInfoList().get(0).apiKeyInfo().getLimitedBy(); 
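        // The recurring change in these assertions: GetApiKeyResponse#getApiKeyInfos(),
        // which returned an ApiKey[] array, is replaced by getApiKeyInfoList(), a List of
        // GetApiKeyResponse.Item wrappers, so callers unwrap each item via Item::apiKeyInfo.
        // A minimal sketch of that pattern as a reusable helper; the helper name is
        // illustrative only and is not part of this change:
        private static List<ApiKey> apiKeysOf(GetApiKeyResponse response) {
            return response.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList();
        }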
assertThat(limitedBy.roleDescriptorsList().size(), equalTo(1)); assertThat(limitedBy.roleDescriptorsList().iterator().next(), emptyIterable()); @@ -1761,8 +1773,8 @@ public void testApiKeyRunAsAnotherUserCanCreateApiKey() { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(response2.getId()).ownedByAuthenticatedUser(true).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey apiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey apiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKeyInfo.getId(), equalTo(response2.getId())); assertThat(apiKeyInfo.getUsername(), equalTo(ES_TEST_ROOT_USER)); assertThat(apiKeyInfo.getRealm(), equalTo("file")); @@ -2885,8 +2897,8 @@ private ApiKey getApiKeyInfo(Client client, String apiKeyId, boolean withLimited future ); final GetApiKeyResponse getApiKeyResponse = future.actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - return getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + return getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); } else { final PlainActionFuture future = new PlainActionFuture<>(); client.execute( @@ -2895,17 +2907,17 @@ private ApiKey getApiKeyInfo(Client client, String apiKeyId, boolean withLimited future ); final QueryApiKeyResponse queryApiKeyResponse = future.actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - return queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + return queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); } } - private ApiKey[] getAllApiKeyInfo(Client client, boolean withLimitedBy) { + private List getAllApiKeyInfo(Client client, boolean withLimitedBy) { if (randomBoolean()) { final PlainActionFuture future = new PlainActionFuture<>(); client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().withLimitedBy(withLimitedBy).build(), future); final GetApiKeyResponse getApiKeyResponse = future.actionGet(); - return getApiKeyResponse.getApiKeyInfos(); + return getApiKeyResponse.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(); } else { final PlainActionFuture future = new PlainActionFuture<>(); client.execute( @@ -2914,7 +2926,7 @@ private ApiKey[] getAllApiKeyInfo(Client client, boolean withLimitedBy) { future ); final QueryApiKeyResponse queryApiKeyResponse = future.actionGet(); - return Arrays.stream(queryApiKeyResponse.getItems()).map(QueryApiKeyResponse.Item::getApiKey).toArray(ApiKey[]::new); + return queryApiKeyResponse.getApiKeyInfoList().stream().map(QueryApiKeyResponse.Item::apiKeyInfo).toList(); } } @@ -2967,7 +2979,8 @@ private void assertApiKeyNotCreated(Client client, String keyName) throws Execut 0, client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyName(keyName).ownedByAuthenticatedUser(false).build()) .get() - .getApiKeyInfos().length + .getApiKeyInfoList() + .size() ); } @@ -2977,7 +2990,7 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, List expectedLimitedByRoleDescriptors, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { @@ -2988,7 +3001,7 @@ private void verifyApiKeyInfos( metadatas, expectedRoleDescriptors, expectedLimitedByRoleDescriptors, - apiKeyInfos, + 
apiKeyInfos.stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), validApiKeyIds, invalidatedApiKeyIds ); @@ -3001,7 +3014,7 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, List expectedLimitedByRoleDescriptors, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { @@ -3025,16 +3038,16 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, Function> expectedLimitedByRoleDescriptorsLookup, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { - assertThat(apiKeyInfos.length, equalTo(expectedNumberOfApiKeys)); + assertThat(apiKeyInfos.size(), equalTo(expectedNumberOfApiKeys)); List expectedIds = responses.stream() .filter(o -> validApiKeyIds.contains(o.getId())) .map(o -> o.getId()) .collect(Collectors.toList()); - List actualIds = Arrays.stream(apiKeyInfos) + List actualIds = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getId()) .collect(Collectors.toList()); @@ -3043,19 +3056,19 @@ private void verifyApiKeyInfos( .filter(o -> validApiKeyIds.contains(o.getId())) .map(o -> o.getName()) .collect(Collectors.toList()); - List actualNames = Arrays.stream(apiKeyInfos) + List actualNames = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getName()) .collect(Collectors.toList()); assertThat(actualNames, containsInAnyOrder(expectedNames.toArray(Strings.EMPTY_ARRAY))); Set expectedUsernames = (validApiKeyIds.isEmpty()) ? Collections.emptySet() : Set.of(user); - Set actualUsernames = Arrays.stream(apiKeyInfos) + Set actualUsernames = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getUsername()) .collect(Collectors.toSet()); assertThat(actualUsernames, containsInAnyOrder(expectedUsernames.toArray(Strings.EMPTY_ARRAY))); if (invalidatedApiKeyIds != null) { - List actualInvalidatedApiKeyIds = Arrays.stream(apiKeyInfos) + List actualInvalidatedApiKeyIds = apiKeyInfos.stream() .filter(o -> o.isInvalidated()) .map(o -> o.getId()) .collect(Collectors.toList()); @@ -3073,7 +3086,7 @@ private void verifyApiKeyInfos( assertThat(apiKey.getMetadata(), equalTo(metadata == null ? 
Map.of() : metadata)); } } - Arrays.stream(apiKeyInfos).forEach(apiKeyInfo -> { + apiKeyInfos.stream().forEach(apiKeyInfo -> { assertThat(apiKeyInfo.getRoleDescriptors(), containsInAnyOrder(expectedRoleDescriptors.toArray(RoleDescriptor[]::new))); final List expectedLimitedByRoleDescriptors = expectedLimitedByRoleDescriptorsLookup.apply( apiKeyInfo.getUsername() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index 707e7b2846a9b..aceafaa0bfc66 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -92,7 +92,6 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; @@ -101,6 +100,7 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -148,10 +148,10 @@ public void testQueryWithExpiredKeys() throws InterruptedException { .filter(QueryBuilders.rangeQuery("expiration").from(Instant.now().toEpochMilli())) ); final QueryApiKeyResponse queryApiKeyResponse = client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems().length, equalTo(1)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getId(), equalTo(id2)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getName(), equalTo("long-lived")); - assertThat(queryApiKeyResponse.getItems()[0].getSortValues(), emptyArray()); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getId(), equalTo(id2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getName(), equalTo("long-lived")); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).sortValues(), emptyArray()); } public void testCreatingApiKeyWithNoAccess() { @@ -286,8 +286,8 @@ public void testGetApiKeyWorksForTheApiKeyItself() { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(apiKeyId).ownedByAuthenticatedUser(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getId(), equalTo(apiKeyId)); + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getId(), is(apiKeyId)); // Cannot get any other keys final ElasticsearchSecurityException e = expectThrows( @@ -613,8 +613,8 @@ public void testCreateCrossClusterApiKey() throws IOException { GetApiKeyAction.INSTANCE, 
GetApiKeyRequest.builder().apiKeyId(apiKeyId).withLimitedBy(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(getApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat(getApiKeyInfo.getRoleDescriptors(), contains(expectedRoleDescriptor)); assertThat(getApiKeyInfo.getLimitedBy(), nullValue()); @@ -634,8 +634,8 @@ public void testCreateCrossClusterApiKey() throws IOException { randomBoolean() ); final QueryApiKeyResponse queryApiKeyResponse = client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - final ApiKey queryApiKeyInfo = queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey queryApiKeyInfo = queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(queryApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat(queryApiKeyInfo.getRoleDescriptors(), contains(expectedRoleDescriptor)); assertThat(queryApiKeyInfo.getLimitedBy(), nullValue()); @@ -669,8 +669,8 @@ public void testUpdateCrossClusterApiKey() throws IOException { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(apiKeyId).withLimitedBy(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(getApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat(getApiKeyInfo.getRoleDescriptors(), contains(originalRoleDescriptor)); assertThat(getApiKeyInfo.getLimitedBy(), nullValue()); @@ -740,8 +740,8 @@ public void testUpdateCrossClusterApiKey() throws IOException { randomBoolean() ); final QueryApiKeyResponse queryApiKeyResponse = client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - final ApiKey queryApiKeyInfo = queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + final ApiKey queryApiKeyInfo = queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(queryApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat( queryApiKeyInfo.getRoleDescriptors(), diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java index 9806650f99094..cd1eb8a650149 100644 --- a/x-pack/plugin/security/src/main/java/module-info.java +++ b/x-pack/plugin/security/src/main/java/module-info.java @@ -65,6 +65,7 @@ exports org.elasticsearch.xpack.security.action.user to org.elasticsearch.server; exports org.elasticsearch.xpack.security.action.settings to org.elasticsearch.server; exports org.elasticsearch.xpack.security.operator to org.elasticsearch.internal.operator, org.elasticsearch.internal.security; + exports org.elasticsearch.xpack.security.authz to org.elasticsearch.internal.security; exports org.elasticsearch.xpack.security.authc to org.elasticsearch.xcontent; exports org.elasticsearch.xpack.security.slowlog to 
org.elasticsearch.server; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3d27e9ee06ddb..5736d3e550f01 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -74,6 +74,7 @@ import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.NetworkPlugin; @@ -290,6 +291,7 @@ import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.DlsFlsRequestCacheDifferentiator; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; @@ -581,6 +583,7 @@ public class Security extends Plugin private final SetOnce realms = new SetOnce<>(); private final SetOnce client = new SetOnce<>(); private final SetOnce> reloadableComponents = new SetOnce<>(); + private final SetOnce authorizationDenialMessages = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -1007,6 +1010,9 @@ Collection createComponents( } requestInterceptors = Collections.unmodifiableSet(requestInterceptors); + if (authorizationDenialMessages.get() == null) { + authorizationDenialMessages.set(new AuthorizationDenialMessages.Default()); + } final AuthorizationService authzService = new AuthorizationService( settings, allRolesStore, @@ -1021,7 +1027,8 @@ Collection createComponents( getLicenseState(), expressionResolver, operatorPrivilegesService.get(), - restrictedIndices + restrictedIndices, + authorizationDenialMessages.get() ); components.add(nativeRolesStore); // used by roles actions @@ -1941,29 +1948,29 @@ public UnaryOperator> getIndexTemplateMetadat } @Override - public Function> getFieldFilter() { + public Function getFieldFilter() { if (enabled) { return index -> { XPackLicenseState licenseState = getLicenseState(); IndicesAccessControl indicesAccessControl = threadContext.get() .getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); if (indicesAccessControl == null) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } assert indicesAccessControl.isGranted(); IndicesAccessControl.IndexAccessControl indexPermissions = indicesAccessControl.getIndexPermissions(index); if (indexPermissions == null) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } FieldPermissions fieldPermissions = indexPermissions.getFieldPermissions(); if (fieldPermissions.hasFieldLevelSecurity() == false) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } if (FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState) == false) { // check license last, once we know FLS is actually used - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } - return 
fieldPermissions::grantsAccessTo; + return fieldPermissions.fieldPredicate(); }; } return MapperPlugin.super.getFieldFilter(); @@ -2098,6 +2105,7 @@ public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, bulkUpdateApiKeyRequestTranslator, BulkUpdateApiKeyRequestTranslator.class); loadSingletonExtensionAndSetOnce(loader, createApiKeyRequestBuilderFactory, CreateApiKeyRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); + loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); } private void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce setOnce, Class clazz) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 78b5f3afc17cf..5f47cb9223f70 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.Realms; @@ -63,12 +62,7 @@ public TransportGetUsersAction( this.settings = settings; this.usersStore = usersStore; this.reservedRealm = reservedRealm; - this.nativeRealmRef = realms.getRealmRefs() - .values() - .stream() - .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) - .findFirst() - .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + this.nativeRealmRef = realms.getNativeRealmRef(); this.profileService = profileService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java index c5a8795779f08..ca5b9fc54db47 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Subject; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.profile.ProfileService; @@ -57,12 +56,7 @@ public TransportQueryUserAction( super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager()); this.usersStore = usersStore; this.profileService = profileService; - this.nativeRealmRef = realms.getRealmRefs() - .values() - .stream() - 
.filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) - .findFirst() - .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + this.nativeRealmRef = realms.getNativeRealmRef(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index fea0c812e7e42..799d38377af5a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -1927,7 +1927,7 @@ public void getApiKeys( Arrays.toString(apiKeyIds), activeOnly ); - listener.onResponse(GetApiKeyResponse.emptyResponse()); + listener.onResponse(GetApiKeyResponse.EMPTY); } else { listener.onResponse(new GetApiKeyResponse(apiKeyInfos)); } @@ -1940,7 +1940,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); if (frozenSecurityIndex.indexExists() == false) { logger.debug("security index does not exist"); - listener.onResponse(QueryApiKeyResponse.emptyResponse()); + listener.onResponse(QueryApiKeyResponse.EMPTY); } else if (frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); } else { @@ -1955,7 +1955,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act final long total = searchResponse.getHits().getTotalHits().value; if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); - listener.onResponse(QueryApiKeyResponse.emptyResponse()); + listener.onResponse(QueryApiKeyResponse.EMPTY); return; } final List apiKeyItem = Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java index 2ca70bee55d4e..2c0436a000968 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; @@ -110,7 +111,13 @@ public Realms( // initRealms will add default file and native realm config if they are not explicitly configured final List initialRealms = initRealms(realmConfigs); realmRefs = calculateRealmRefs(realmConfigs, realmToDomainConfig); - initialRealms.forEach(realm -> realm.initRealmRef(realmRefs)); + for (Realm realm : initialRealms) { + Authentication.RealmRef realmRef = Objects.requireNonNull( + realmRefs.get(new RealmConfig.RealmIdentifier(realm.type(), realm.name())), + "realmRef can not be null" + ); + realm.setRealmRef(realmRef); + } this.allConfiguredRealms = initialRealms; this.allConfiguredRealms.forEach(r -> r.initialize(this.allConfiguredRealms, licenseState)); @@ -155,6 +162,12 @@ private Map calculateRealm new Authentication.RealmRef(realmIdentifier.getName(), realmIdentifier.getType(), nodeName, realmDomain) ); } + assert realmRefs.values().stream().filter(realmRef -> 
ReservedRealm.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one reserved realm configured"; + assert realmRefs.values().stream().filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one native realm configured"; + assert realmRefs.values().stream().filter(realmRef -> FileRealmSettings.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one file realm configured"; return Map.copyOf(realmRefs); } @@ -368,8 +381,52 @@ public Map domainUsageStats() { } } - public Map getRealmRefs() { - return realmRefs; + /** + * Retrieves the {@link Authentication.RealmRef}, which contains the {@link DomainConfig}, if configured, + * for the passed in {@link RealmConfig.RealmIdentifier}. + * If the realm is not currently configured, {@code null} is returned. + */ + public @Nullable Authentication.RealmRef getRealmRef(RealmConfig.RealmIdentifier realmIdentifier) { + // "file", "native", and "reserved" realms may be renamed, but they refer to the same corpus of users + if (FileRealmSettings.TYPE.equals(realmIdentifier.getType())) { + return getFileRealmRef(); + } else if (NativeRealmSettings.TYPE.equals(realmIdentifier.getType())) { + return getNativeRealmRef(); + } else if (ReservedRealm.TYPE.equals(realmIdentifier.getType())) { + return getReservedRealmRef(); + } else { + // but for other realms, it is assumed that a different realm name or realm type signifies a different corpus of users + return realmRefs.get(realmIdentifier); + } + } + + public Authentication.RealmRef getNativeRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + } + + public Authentication.RealmRef getFileRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> FileRealmSettings.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("file realm realm ref not found")); + } + + public Authentication.RealmRef getReservedRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> ReservedRealm.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("reserved realm realm ref not found")); + } + + // should only be useful for testing + int getRealmRefsCount() { + return realmRefs.size(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java index 9d66318a1f0fb..ae3a09af4751d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.Subject; -import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; @@ 
-29,158 +29,202 @@ import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.security.authz.AuthorizationService.isIndexAction; -class AuthorizationDenialMessages { - - private AuthorizationDenialMessages() {} - - static String runAsDenied(Authentication authentication, @Nullable AuthorizationInfo authorizationInfo, String action) { - assert authentication.isRunAs() : "constructing run as denied message but authentication for action was not run as"; - - String userText = authenticatedUserDescription(authentication); - String actionIsUnauthorizedMessage = actionIsUnauthorizedMessage(action, userText); - - String unauthorizedToRunAsMessage = "because " - + userText - + " is unauthorized to run as [" - + authentication.getEffectiveSubject().getUser().principal() - + "]"; - - return actionIsUnauthorizedMessage - + rolesDescription(authentication.getAuthenticatingSubject(), authorizationInfo.getAuthenticatedUserAuthorizationInfo()) - + ", " - + unauthorizedToRunAsMessage; - } - - static String actionDenied( +public interface AuthorizationDenialMessages { + String actionDenied( Authentication authentication, - @Nullable AuthorizationInfo authorizationInfo, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, String action, TransportRequest request, @Nullable String context - ) { - String userText = successfulAuthenticationDescription(authentication, authorizationInfo); - String remoteClusterText = authentication.isCrossClusterAccess() ? remoteClusterText(null) : ""; - String message = actionIsUnauthorizedMessage(action, remoteClusterText, userText); - if (context != null) { - message = message + " " + context; + ); + + String runAsDenied(Authentication authentication, @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, String action); + + String remoteActionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + String clusterAlias + ); + + class Default implements AuthorizationDenialMessages { + public Default() {} + + @Override + public String runAsDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action + ) { + assert authentication.isRunAs() : "constructing run as denied message but authentication for action was not run as"; + + String userText = authenticatedUserDescription(authentication); + String actionIsUnauthorizedMessage = actionIsUnauthorizedMessage(action, userText); + + String unauthorizedToRunAsMessage = "because " + + userText + + " is unauthorized to run as [" + + authentication.getEffectiveSubject().getUser().principal() + + "]"; + + return actionIsUnauthorizedMessage + + rolesDescription(authentication.getAuthenticatingSubject(), authorizationInfo.getAuthenticatedUserAuthorizationInfo()) + + ", " + + unauthorizedToRunAsMessage; } - if (ClusterPrivilegeResolver.isClusterAction(action)) { - final Collection privileges = ClusterPrivilegeResolver.findPrivilegesThatGrant(action, request, authentication); - if (privileges != null && privileges.size() > 0) { - message = message - + ", this action is granted by the cluster privileges [" - + collectionToCommaDelimitedString(privileges) - + "]"; + @Override + public String actionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + TransportRequest request, + @Nullable String context + ) { + String userText 
= successfulAuthenticationDescription(authentication, authorizationInfo); + String remoteClusterText = authentication.isCrossClusterAccess() ? remoteClusterText(null) : ""; + String message = actionIsUnauthorizedMessage(action, remoteClusterText, userText); + if (context != null) { + message = message + " " + context; } - } else if (isIndexAction(action)) { - final Collection privileges = IndexPrivilege.findPrivilegesThatGrant(action); - if (privileges != null && privileges.size() > 0) { - message = message - + ", this action is granted by the index privileges [" - + collectionToCommaDelimitedString(privileges) - + "]"; + + if (ClusterPrivilegeResolver.isClusterAction(action)) { + final Collection privileges = findClusterPrivilegesThatGrant(authentication, action, request); + if (privileges != null && privileges.size() > 0) { + message = message + + ", this action is granted by the cluster privileges [" + + collectionToCommaDelimitedString(privileges) + + "]"; + } + } else if (isIndexAction(action)) { + final Collection privileges = findIndexPrivilegesThatGrant(action); + if (privileges != null && privileges.size() > 0) { + message = message + + ", this action is granted by the index privileges [" + + collectionToCommaDelimitedString(privileges) + + "]"; + } } + + return message; } - return message; - } + @Override + public String remoteActionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + String clusterAlias + ) { + assert isIndexAction(action); + String userText = successfulAuthenticationDescription(authentication, authorizationInfo); + String remoteClusterText = remoteClusterText(clusterAlias); + return actionIsUnauthorizedMessage(action, remoteClusterText, userText) + + " because no remote indices privileges apply for the target cluster"; + } - static String remoteActionDenied( - Authentication authentication, - @Nullable AuthorizationInfo authorizationInfo, - String action, - String clusterAlias - ) { - assert isIndexAction(action); - String userText = successfulAuthenticationDescription(authentication, authorizationInfo); - String remoteClusterText = remoteClusterText(clusterAlias); - return actionIsUnauthorizedMessage(action, remoteClusterText, userText) - + " because no remote indices privileges apply for the target cluster"; - } + protected Collection findClusterPrivilegesThatGrant( + Authentication authentication, + String action, + TransportRequest request + ) { + return ClusterPrivilegeResolver.findPrivilegesThatGrant(action, request, authentication); + } - private static String remoteClusterText(@Nullable String clusterAlias) { - return Strings.format("towards remote cluster%s ", clusterAlias == null ? "" : " [" + clusterAlias + "]"); - } + protected Collection findIndexPrivilegesThatGrant(String action) { + return IndexPrivilege.findPrivilegesThatGrant(action); + } + + private String remoteClusterText(@Nullable String clusterAlias) { + return Strings.format("towards remote cluster%s ", clusterAlias == null ? "" : " [" + clusterAlias + "]"); + } - private static String authenticatedUserDescription(Authentication authentication) { - String userText = (authentication.isServiceAccount() ? 
"service account" : "user") - + " [" - + authentication.getAuthenticatingSubject().getUser().principal() - + "]"; - if (authentication.isAuthenticatedAsApiKey() || authentication.isCrossClusterAccess()) { - final String apiKeyId = (String) authentication.getAuthenticatingSubject() - .getMetadata() - .get(AuthenticationField.API_KEY_ID_KEY); - assert apiKeyId != null : "api key id must be present in the metadata"; - userText = "API key id [" + apiKeyId + "] of " + userText; - if (authentication.isCrossClusterAccess()) { - final Authentication crossClusterAccessAuthentication = (Authentication) authentication.getAuthenticatingSubject() + private String authenticatedUserDescription(Authentication authentication) { + String userText = (authentication.isServiceAccount() ? "service account" : "user") + + " [" + + authentication.getAuthenticatingSubject().getUser().principal() + + "]"; + if (authentication.isAuthenticatedAsApiKey() || authentication.isCrossClusterAccess()) { + final String apiKeyId = (String) authentication.getAuthenticatingSubject() .getMetadata() - .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); - assert crossClusterAccessAuthentication != null : "cross cluster access authentication must be present in the metadata"; - userText = successfulAuthenticationDescription(crossClusterAccessAuthentication, null) + " authenticated by " + userText; + .get(AuthenticationField.API_KEY_ID_KEY); + assert apiKeyId != null : "api key id must be present in the metadata"; + userText = "API key id [" + apiKeyId + "] of " + userText; + if (authentication.isCrossClusterAccess()) { + final Authentication crossClusterAccessAuthentication = (Authentication) authentication.getAuthenticatingSubject() + .getMetadata() + .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); + assert crossClusterAccessAuthentication != null : "cross cluster access authentication must be present in the metadata"; + userText = successfulAuthenticationDescription(crossClusterAccessAuthentication, null) + + " authenticated by " + + userText; + } } + return userText; } - return userText; - } - static String rolesDescription(Subject subject, @Nullable AuthorizationInfo authorizationInfo) { - // We cannot print the roles if it's an API key or a service account (both do not have roles, but privileges) - if (subject.getType() != Subject.Type.USER) { - return ""; - } + // package-private for tests + String rolesDescription(Subject subject, @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo) { + // We cannot print the roles if it's an API key or a service account (both do not have roles, but privileges) + if (subject.getType() != Subject.Type.USER) { + return ""; + } - final StringBuilder sb = new StringBuilder(); - final List effectiveRoleNames = extractEffectiveRoleNames(authorizationInfo); - if (effectiveRoleNames == null) { - sb.append(" with assigned roles [").append(Strings.arrayToCommaDelimitedString(subject.getUser().roles())).append("]"); - } else { - sb.append(" with effective roles [").append(Strings.collectionToCommaDelimitedString(effectiveRoleNames)).append("]"); - - final Set assignedRoleNames = Set.of(subject.getUser().roles()); - final SortedSet unfoundedRoleNames = Sets.sortedDifference(assignedRoleNames, Set.copyOf(effectiveRoleNames)); - if (false == unfoundedRoleNames.isEmpty()) { - sb.append(" (assigned roles [") - .append(Strings.collectionToCommaDelimitedString(unfoundedRoleNames)) - .append("] were not found)"); + final StringBuilder sb = new StringBuilder(); + 
final List effectiveRoleNames = extractEffectiveRoleNames(authorizationInfo); + if (effectiveRoleNames == null) { + sb.append(" with assigned roles [").append(Strings.arrayToCommaDelimitedString(subject.getUser().roles())).append("]"); + } else { + sb.append(" with effective roles [").append(Strings.collectionToCommaDelimitedString(effectiveRoleNames)).append("]"); + + final Set assignedRoleNames = Set.of(subject.getUser().roles()); + final SortedSet unfoundedRoleNames = Sets.sortedDifference(assignedRoleNames, Set.copyOf(effectiveRoleNames)); + if (false == unfoundedRoleNames.isEmpty()) { + sb.append(" (assigned roles [") + .append(Strings.collectionToCommaDelimitedString(unfoundedRoleNames)) + .append("] were not found)"); + } } + return sb.toString(); } - return sb.toString(); - } - static String successfulAuthenticationDescription(Authentication authentication, @Nullable AuthorizationInfo authorizationInfo) { - String userText = authenticatedUserDescription(authentication); + // package-private for tests + String successfulAuthenticationDescription( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo + ) { + String userText = authenticatedUserDescription(authentication); - if (authentication.isRunAs()) { - userText = userText + " run as [" + authentication.getEffectiveSubject().getUser().principal() + "]"; + if (authentication.isRunAs()) { + userText = userText + " run as [" + authentication.getEffectiveSubject().getUser().principal() + "]"; + } + + userText += rolesDescription(authentication.getEffectiveSubject(), authorizationInfo); + return userText; } - userText += rolesDescription(authentication.getEffectiveSubject(), authorizationInfo); - return userText; - } + private List extractEffectiveRoleNames(@Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo) { + if (authorizationInfo == null) { + return null; + } - private static List extractEffectiveRoleNames(@Nullable AuthorizationInfo authorizationInfo) { - if (authorizationInfo == null) { - return null; + final Map info = authorizationInfo.asMap(); + final Object roleNames = info.get(PRINCIPAL_ROLES_FIELD_NAME); + // AuthorizationInfo from custom authorization engine may not have this field or have it as a different data type + if (false == roleNames instanceof String[]) { + assert false == authorizationInfo instanceof RBACEngine.RBACAuthorizationInfo + : "unexpected user.roles field [" + roleNames + "] for RBACAuthorizationInfo"; + return null; + } + return Arrays.stream((String[]) roleNames).sorted().toList(); } - final Map info = authorizationInfo.asMap(); - final Object roleNames = info.get(PRINCIPAL_ROLES_FIELD_NAME); - // AuthorizationInfo from custom authorization engine may not have this field or have it as a different data type - if (false == roleNames instanceof String[]) { - assert false == authorizationInfo instanceof RBACEngine.RBACAuthorizationInfo - : "unexpected user.roles field [" + roleNames + "] for RBACAuthorizationInfo"; - return null; + private String actionIsUnauthorizedMessage(String action, String userText) { + return actionIsUnauthorizedMessage(action, "", userText); } - return Arrays.stream((String[]) roleNames).sorted().toList(); - } - - private static String actionIsUnauthorizedMessage(String action, String userText) { - return actionIsUnauthorizedMessage(action, "", userText); - } - private static String actionIsUnauthorizedMessage(String action, String remoteClusterText, String userText) { - return "action [" + action + "] " + 
remoteClusterText + "is unauthorized for " + userText; + private String actionIsUnauthorizedMessage(String action, String remoteClusterText, String userText) { + return "action [" + action + "] " + remoteClusterText + "is unauthorized for " + userText; + } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index c886f2fde55ab..0c28ea1e37354 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -139,6 +139,7 @@ public class AuthorizationService { private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; + private final AuthorizationDenialMessages authorizationDenialMessages; public AuthorizationService( Settings settings, @@ -154,7 +155,8 @@ public AuthorizationService( XPackLicenseState licenseState, IndexNameExpressionResolver resolver, OperatorPrivilegesService operatorPrivilegesService, - RestrictedIndices restrictedIndices + RestrictedIndices restrictedIndices, + AuthorizationDenialMessages authorizationDenialMessages ) { this.clusterService = clusterService; this.auditTrailService = auditTrailService; @@ -178,6 +180,7 @@ public AuthorizationService( this.licenseState = licenseState; this.operatorPrivilegesService = operatorPrivilegesService; this.indicesAccessControlWrapper = new DlsFlsFeatureTrackingIndicesAccessControlWrapper(settings, licenseState); + this.authorizationDenialMessages = authorizationDenialMessages; } public void checkPrivileges( @@ -922,7 +925,7 @@ private ElasticsearchSecurityException runAsDenied( return denialException( authentication, action, - () -> AuthorizationDenialMessages.runAsDenied(authentication, authorizationInfo, action), + () -> authorizationDenialMessages.runAsDenied(authentication, authorizationInfo, action), null ); } @@ -932,7 +935,7 @@ public ElasticsearchSecurityException remoteActionDenied(Authentication authenti return denialException( authentication, action, - () -> AuthorizationDenialMessages.remoteActionDenied(authentication, authorizationInfo, action, clusterAlias), + () -> authorizationDenialMessages.remoteActionDenied(authentication, authorizationInfo, action, clusterAlias), null ); } @@ -967,7 +970,7 @@ private ElasticsearchSecurityException actionDenied( return denialException( authentication, action, - () -> AuthorizationDenialMessages.actionDenied(authentication, authorizationInfo, action, request, context), + () -> authorizationDenialMessages.actionDenied(authentication, authorizationInfo, action, request, context), cause ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java index 6cc17e418314a..39813a2af5dfd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.license.PutLicenseAction; -import org.elasticsearch.rest.RestChannel; import 
org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportRequest; @@ -79,8 +78,8 @@ public OperatorPrivilegesViolation check(String action, TransportRequest request } @Override - public OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel) { - return null; // no restrictions + public void checkRest(RestHandler restHandler, RestRequest restRequest) { + // no restrictions } private OperatorPrivilegesViolation checkClusterUpdateSettings(ClusterUpdateSettingsRequest request) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java index c72c72e144b97..f0889f1c48c75 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.security.operator; -import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportRequest; @@ -22,18 +23,22 @@ public interface OperatorOnlyRegistry { OperatorPrivilegesViolation check(String action, TransportRequest request); /** - * Checks to see if a given {@link RestHandler} is subject to operator-only restrictions for the REST API. Any REST API may be - * fully or partially restricted. A fully restricted REST API mandates that the implementation call restChannel.sendResponse(...) and - * return a {@link OperatorPrivilegesViolation}. A partially restricted REST API mandates that the {@link RestRequest} is marked as - * restricted so that the downstream handler can behave appropriately. For example, to restrict the REST response the implementation + * Checks to see if a given {@link RestHandler} is subject to operator-only restrictions for the REST API. + * + * Any REST API may be fully or partially restricted. + * A fully restricted REST API mandates that the implementation of this method throw an + * {@link org.elasticsearch.ElasticsearchStatusException} with an appropriate status code and error message. + * + * A partially restricted REST API mandates that the {@link RestRequest} is marked as restricted so that the downstream handler can + * behave appropriately. + * For example, to restrict the REST response the implementation * should call {@link RestRequest#markPathRestricted(String)} so that the downstream handler can properly restrict the response - * before returning to the client. Note - a partial restriction should return null. + * before returning to the client. Note - a partial restriction should not throw an exception. + * * @param restHandler The {@link RestHandler} to check for any restrictions * @param restRequest The {@link RestRequest} to check for any restrictions and mark any partially restricted REST API's - * @param restChannel The {@link RestChannel} to enforce fully restricted REST API's - * @return {@link OperatorPrivilegesViolation} iff the request was fully restricted and the response has been sent back to the client. - * else returns null. 
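[Editor's aside, not part of the patch] The interface change here replaces the violation-returning checkRest(RestHandler, RestRequest, RestChannel) with a void checkRest(RestHandler, RestRequest) that throws for fully restricted APIs; OperatorPrivileges (further down in this patch) now propagates that exception instead of writing the response itself. A minimal sketch of the new contract from an implementer's point of view follows; the path checks and restriction key are hypothetical, while ElasticsearchStatusException and RestRequest#markPathRestricted(String) are the mechanisms named in the updated javadoc.

    import org.elasticsearch.ElasticsearchStatusException;
    import org.elasticsearch.rest.RestHandler;
    import org.elasticsearch.rest.RestRequest;
    import org.elasticsearch.rest.RestStatus;

    class CheckRestContractSketch {
        // Fully restricted: throw, and let OperatorPrivileges.checkRest propagate the exception to the client.
        // Partially restricted: mark the request and return normally so the downstream handler can trim its response.
        static void checkRest(RestHandler restHandler, RestRequest restRequest) {
            if (restRequest.path().startsWith("/_example/operator_only")) { // hypothetical fully restricted path
                throw new ElasticsearchStatusException("operator privileges are required for this endpoint", RestStatus.FORBIDDEN);
            }
            if (restRequest.path().startsWith("/_example/partially_restricted")) { // hypothetical partial restriction
                restRequest.markPathRestricted("example_restriction"); // hypothetical restriction key
            }
            // no restriction: fall through without throwing
        }
    }
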
+ * @throws ElasticsearchStatusException if the request should be denied in its entirety (fully restricted) */ - OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel); + void checkRest(RestHandler restHandler, RestRequest restRequest) throws ElasticsearchException; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java index eef7cbdfc7f5e..79c529eb3d7b1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.common.settings.Setting; @@ -156,28 +157,40 @@ public boolean checkRest(RestHandler restHandler, RestRequest restRequest, RestC if (false == isOperator(threadContext)) { // Only check whether request is operator-only when user is NOT an operator if (logger.isTraceEnabled()) { - Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); - final User user = authentication.getEffectiveSubject().getUser(); + final User user = getUser(threadContext); logger.trace("Checking for any operator-only REST violations for user [{}] and uri [{}]", user, restRequest.uri()); } - OperatorPrivilegesViolation violation = operatorOnlyRegistry.checkRest(restHandler, restRequest, restChannel); - if (violation != null) { + + try { + operatorOnlyRegistry.checkRest(restHandler, restRequest); + } catch (ElasticsearchException e) { if (logger.isDebugEnabled()) { - Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); - final User user = authentication.getEffectiveSubject().getUser(); logger.debug( "Found the following operator-only violation [{}] for user [{}] and uri [{}]", - violation.message(), - user, + e.getMessage(), + getUser(threadContext), restRequest.uri() ); } - return false; + throw e; + } catch (Exception e) { + logger.info( + "Unexpected exception [{}] while processing operator privileges for user [{}] and uri [{}]", + e.getMessage(), + getUser(threadContext), + restRequest.uri() + ); + throw e; } } return true; } + private static User getUser(ThreadContext threadContext) { + Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); + return authentication.getEffectiveSubject().getUser(); + } + public void maybeInterceptRequest(ThreadContext threadContext, TransportRequest request) { if (request instanceof RestoreSnapshotRequest) { logger.debug("Intercepting [{}] for operator privileges", request); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java index cd751740dd0fb..b192c836434c2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java @@ -66,7 +66,7 @@ public RestResponse buildResponse(GetApiKeyResponse getApiKeyResponse, XContentB getApiKeyResponse.toXContent(builder, channel.request()); // return HTTP status 404 if no API key found for API key id - if (Strings.hasText(apiKeyId) && getApiKeyResponse.getApiKeyInfos().length == 0) { + if (Strings.hasText(apiKeyId) && getApiKeyResponse.getApiKeyInfoList().isEmpty()) { return new RestResponse(RestStatus.NOT_FOUND, builder); } return new RestResponse(RestStatus.OK, builder); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 66b03e8dedd32..f575bb6adc50e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -52,9 +52,9 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.internal.XPackLicenseStatus; import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.internal.RestExtension; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.ScriptService; @@ -121,7 +121,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -177,7 +176,7 @@ public OperatorPrivilegesViolation check(String action, TransportRequest request } @Override - public OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel) { + public void checkRest(RestHandler restHandler, RestRequest restRequest) { throw new RuntimeException("boom"); } } @@ -470,7 +469,7 @@ public void testJoinValidatorForFIPSOnForbiddenLicense() throws Exception { public void testGetFieldFilterSecurityEnabled() throws Exception { createComponents(Settings.EMPTY); - Function> fieldFilter = security.getFieldFilter(); + Function fieldFilter = security.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); Map permissionsMap = new HashMap<>(); @@ -492,9 +491,9 @@ public void testGetFieldFilterSecurityEnabled() throws Exception { assertThat(fieldFilter.apply("index_granted"), trueWith("field_granted")); assertThat(fieldFilter.apply("index_granted"), falseWith(randomAlphaOfLengthBetween(3, 10))); - assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_granted_all_permissions")); + assertEquals(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("index_granted_all_permissions")); assertThat(fieldFilter.apply("index_granted_all_permissions"), trueWith(randomAlphaOfLengthBetween(3, 10))); - assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_other")); + assertEquals(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("index_other")); } public void testGetFieldFilterSecurityDisabled() throws Exception { @@ -504,7 +503,7 @@ public void testGetFieldFilterSecurityDisabled() throws Exception { public void testGetFieldFilterSecurityEnabledLicenseNoFLS() throws Exception { 
createComponents(Settings.EMPTY); - Function> fieldFilter = security.getFieldFilter(); + Function fieldFilter = security.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); licenseState.update( new XPackLicenseStatus( @@ -514,7 +513,7 @@ public void testGetFieldFilterSecurityEnabledLicenseNoFLS() throws Exception { ) ); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); - assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6))); + assertSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6))); } public void testValidateRealmsWhenSettingsAreInvalid() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index b6a1523b09784..9f3dccd8b2a48 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.AnonymousUser; @@ -114,12 +113,7 @@ public void testAnonymousUser() { when(securityIndex.isAvailable(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(true); AnonymousUser anonymousUser = new AnonymousUser(settings); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, threadPool); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -195,12 +189,7 @@ public void testReservedUsersOnly() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); final Collection allReservedUsers = userFuture.actionGet(); @@ -284,12 +273,7 @@ public void testGetAllUsers() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -390,12 +374,7 @@ public void 
testGetUsersWithProfileUidException() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -445,12 +424,7 @@ private void testGetStoreOnlyUsers(List storeUsers) { NativeUsersStore usersStore = mock(NativeUsersStore.class); AnonymousUser anonymousUser = new AnonymousUser(settings); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, threadPool); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -596,16 +570,8 @@ private List randomUsersWithInternalUsernames() { private Realms mockRealms() { final Realms realms = mock(Realms.class); - when(realms.getRealmRefs()).thenReturn( - Map.of( - new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, NativeRealmSettings.DEFAULT_NAME), - new Authentication.RealmRef( - NativeRealmSettings.DEFAULT_NAME, - NativeRealmSettings.TYPE, - randomAlphaOfLengthBetween(3, 8), - null - ) - ) + when(realms.getNativeRealmRef()).thenReturn( + new Authentication.RealmRef(NativeRealmSettings.DEFAULT_NAME, NativeRealmSettings.TYPE, randomAlphaOfLengthBetween(3, 8), null) ); return realms; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java index 1c14da149cbd3..3fb3a816baa8b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.User; @@ -305,16 +304,8 @@ private ProfileService mockProfileService(boolean throwException, boolean profil private Realms mockRealms() { final Realms realms = mock(Realms.class); - when(realms.getRealmRefs()).thenReturn( - Map.of( - new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, NativeRealmSettings.DEFAULT_NAME), - new Authentication.RealmRef( - NativeRealmSettings.DEFAULT_NAME, - NativeRealmSettings.TYPE, - randomAlphaOfLengthBetween(3, 8), - null - ) - ) + when(realms.getNativeRealmRef()).thenReturn( + new Authentication.RealmRef(NativeRealmSettings.DEFAULT_NAME, NativeRealmSettings.TYPE, randomAlphaOfLengthBetween(3, 8), null) ); return realms; } diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index df454ddffe96f..342eff842b05e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -181,6 +181,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -326,7 +327,7 @@ public void testGetApiKeys() throws Exception { verify(searchRequestBuilder).setFetchSource(eq(true)); assertThat(searchRequest.get().source().query(), is(boolQuery)); GetApiKeyResponse getApiKeyResponse = getApiKeyResponsePlainActionFuture.get(); - assertThat(getApiKeyResponse.getApiKeyInfos(), emptyArray()); + assertThat(getApiKeyResponse.getApiKeyInfoList(), emptyIterable()); } @SuppressWarnings("unchecked") @@ -407,28 +408,31 @@ public void testApiKeysOwnerRealmIdentifier() throws Exception { getApiKeyResponsePlainActionFuture ); GetApiKeyResponse getApiKeyResponse = getApiKeyResponsePlainActionFuture.get(); - assertThat(getApiKeyResponse.getApiKeyInfos().length, is(2)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealm(), is(realm1)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealmType(), is(realm1Type)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealmIdentifier(), is(new RealmConfig.RealmIdentifier(realm1Type, realm1))); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealm(), is(realm2)); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealmType(), nullValue()); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealmIdentifier(), nullValue()); + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(2)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealm(), is(realm1)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmType(), is(realm1Type)); + assertThat( + getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmIdentifier(), + is(new RealmConfig.RealmIdentifier(realm1Type, realm1)) + ); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealm(), is(realm2)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmType(), nullValue()); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmIdentifier(), nullValue()); } { PlainActionFuture queryApiKeyResponsePlainActionFuture = new PlainActionFuture<>(); service.queryApiKeys(new SearchRequest(".security"), false, queryApiKeyResponsePlainActionFuture); QueryApiKeyResponse queryApiKeyResponse = queryApiKeyResponsePlainActionFuture.get(); - assertThat(queryApiKeyResponse.getItems().length, is(2)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getRealm(), is(realm1)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getRealmType(), is(realm1Type)); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealm(), is(realm1)); + 
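            // Illustrative aside (not part of this patch): both GetApiKeyResponse and QueryApiKeyResponse now
            // expose their results via getApiKeyInfoList(), a list of items that wrap the ApiKey via
            // apiKeyInfo(), replacing the old array accessors. A minimal sketch of consuming that list,
            // reusing the queryApiKeyResponse in scope here; ApiKey#getRealm is the accessor already used by
            // the surrounding assertions:
            List<String> ownerRealms = queryApiKeyResponse.getApiKeyInfoList()
                .stream()
                .map(item -> item.apiKeyInfo()) // each item wraps the ApiKey it describes
                .map(ApiKey::getRealm)          // realm name of the key's owner
                .toList();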
assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmType(), is(realm1Type)); assertThat( - queryApiKeyResponse.getItems()[0].getApiKey().getRealmIdentifier(), + queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmIdentifier(), is(new RealmConfig.RealmIdentifier(realm1Type, realm1)) ); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealm(), is(realm2)); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealmType(), nullValue()); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealmIdentifier(), nullValue()); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealm(), is(realm2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmType(), nullValue()); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmIdentifier(), nullValue()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java index c21543500e29b..28b3a1ead9414 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java @@ -76,7 +76,6 @@ import java.util.stream.IntStream; import static org.elasticsearch.test.TestMatchers.throwableWithMessage; -import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -85,7 +84,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -162,6 +160,260 @@ private void setRealmAvailability(Function licenseStateListeners.forEach(LicenseStateListener::licenseStateChanged); } + public void testReservedRealmCannotBePartOfDomain() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String domainName = randomAlphaOfLength(7); + String nativeRealmName = randomFrom("n" + randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".order", 4); + String fileRealmName = randomFrom("f" + randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".order", 5); + builder.put( + "xpack.security.authc.domains." 
+ domainName + ".realms", + randomFrom(nativeRealmName + "," + ReservedRealm.NAME, ReservedRealm.NAME + "," + fileRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat(e.getMessage(), containsString("Undefined realms [reserved] cannot be assigned to domains")); + } + + public void testReservedRealmRef() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom("n" + randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".order", 4); + String fileRealmName = randomFrom("f" + randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".order", 5); + builder.put("xpack.security.authc.reserved_realm.enabled", randomBoolean()); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + Authentication.RealmRef reservedRealmRef = randomFrom( + realms.getReservedRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, randomAlphaOfLength(4))) + ); + assertThat(reservedRealmRef.getName(), is(ReservedRealm.NAME)); + assertThat(reservedRealmRef.getType(), is(ReservedRealm.TYPE)); + assertThat(reservedRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME))); + // no domain + assertThat(reservedRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledNativeRealm() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom(randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." 
+ nativeRealmName + ".enabled", false); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(3)); // reserved, native, file + // there is still a realm ref for the disabled native realm + Authentication.RealmRef nativeRealmRef = randomFrom( + realms.getNativeRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(nativeRealmRef.getName(), is(nativeRealmName)); + assertThat(nativeRealmRef.getType(), is(NativeRealmSettings.TYPE)); + assertThat(nativeRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName))); + // no domain + assertThat(nativeRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledFileRealm() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String fileRealmName = randomFrom(randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", false); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(3)); // reserved, native, file + // there is still a realm ref for the disabled native realm + Authentication.RealmRef fileRealmRef = randomFrom( + realms.getFileRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(fileRealmRef.getName(), is(fileRealmName)); + assertThat(fileRealmRef.getType(), is(FileRealmSettings.TYPE)); + assertThat(fileRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName))); + // no domain + assertThat(fileRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledNativeRealmAssignedToDomain() throws Exception { + String domainName = randomAlphaOfLength(7); + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom(randomAlphaOfLength(8), NativeRealmSettings.TYPE); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", false); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".order", 5); + } + builder.put("xpack.security.authc.realms.type_0.other.order", 4); + builder.put( + "xpack.security.authc.domains." 
+ domainName + ".realms", + randomFrom(nativeRealmName + ",other", "other," + nativeRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(4)); // reserved, native, file, + other + // the realm ref for the disabled native realm contains the domain + Authentication.RealmRef nativeRealmRef = randomFrom( + realms.getNativeRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(nativeRealmRef.getName(), is(nativeRealmName)); + assertThat(nativeRealmRef.getType(), is(NativeRealmSettings.TYPE)); + assertThat(nativeRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName))); + assertThat(nativeRealmRef.getDomain().name(), is(domainName)); + assertThat( + nativeRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + // the realm ref for the other realm also contains the domain with the native realm ref + Authentication.RealmRef otherRealmRef = realms.getRealmRef(new RealmConfig.RealmIdentifier("type_0", "other")); + assertThat(otherRealmRef.getName(), is("other")); + assertThat(otherRealmRef.getType(), is("type_0")); + assertThat(otherRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier("type_0", "other"))); + assertThat(otherRealmRef.getDomain().name(), is(domainName)); + assertThat( + otherRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + } + + public void testRealmRefForDisabledFileRealmAssignedToDomain() throws Exception { + String domainName = randomAlphaOfLength(7); + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String fileRealmName = randomFrom(randomAlphaOfLength(8), FileRealmSettings.TYPE); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", false); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".order", 5); + } + builder.put("xpack.security.authc.realms.type_0.other.order", 4); + builder.put( + "xpack.security.authc.domains." 
+ domainName + ".realms", + randomFrom(fileRealmName + ",other", "other," + fileRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(4)); // reserved, native, file, + other + // the realm ref for the disabled file realm contains the domain + Authentication.RealmRef fileRealmRef = randomFrom( + realms.getFileRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(fileRealmRef.getName(), is(fileRealmName)); + assertThat(fileRealmRef.getType(), is(FileRealmSettings.TYPE)); + assertThat(fileRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName))); + assertThat(fileRealmRef.getDomain().name(), is(domainName)); + assertThat( + fileRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + // the realm ref for the other realm also contains the domain with the native realm ref + Authentication.RealmRef otherRealmRef = realms.getRealmRef(new RealmConfig.RealmIdentifier("type_0", "other")); + assertThat(otherRealmRef.getName(), is("other")); + assertThat(otherRealmRef.getType(), is("type_0")); + assertThat(otherRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier("type_0", "other"))); + assertThat(otherRealmRef.getDomain().name(), is(domainName)); + assertThat( + otherRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + } + + public void testMultipleNativeRealmsFails() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.native.n1.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native.n1.order", 10); + } else { + builder.put("xpack.security.authc.realms.native." + NativeRealmSettings.DEFAULT_NAME + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." + NativeRealmSettings.DEFAULT_NAME + ".order", 10); + } + builder.put("xpack.security.authc.realms.native.n2.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native.n2.order", 100); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat( + e.getMessage(), + containsString( + "multiple [native] realms are configured. 
[native] is an internal realm" + + " and therefore there can only be one such realm configured" + ) + ); + } + + public void testMultipleFileRealmsFails() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.file.f1.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file.f1.order", 10); + } else { + builder.put("xpack.security.authc.realms.file." + FileRealmSettings.DEFAULT_NAME + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." + FileRealmSettings.DEFAULT_NAME + ".order", 10); + } + builder.put("xpack.security.authc.realms.file.f2.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file.f2.order", 100); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat( + e.getMessage(), + containsString( + "multiple [file] realms are configured. [file] is an internal realm" + + " and therefore there can only be one such realm configured" + ) + ); + } + public void testRealmTypeAvailable() { final Set basicRealmTypes = Sets.newHashSet("file", "native", "reserved"); final Set goldRealmTypes = Sets.newHashSet("ldap", "active_directory", "pki"); @@ -243,35 +495,31 @@ public void testWithSettings() throws Exception { assertThat(realms.getUnlicensedRealms(), sameInstance(realms.getUnlicensedRealms())); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); // reserved, file, native and custom realms - assertThat(realmRefs, aMapWithSize(3 + randomRealmTypesCount)); + assertThat(realms.getRealmRefsCount(), is(3 + randomRealmTypesCount)); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("reserved", "reserved"), buildRealmRef("reserved", "reserved", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", nodeName)) ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("file", "default_file"), buildRealmRef("default_file", "file", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", nodeName)) ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("native", "default_native"), buildRealmRef("default_native", "native", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", "native", nodeName)) ); IntStream.range(0, randomRealmTypesCount) .forEach( index -> assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index), - buildRealmRef("realm_" + index, "type_" + index, nodeName) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index)), + is(buildRealmRef("realm_" + index, "type_" + index, nodeName)) ) ); } @@ -406,45 +654,31 @@ public void 
testDomainAssignment() throws Exception { }); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); - // reserved, file, native and custom realms - assertThat(realmRefs, aMapWithSize(3 + randomRealmTypesCount)); + assertThat(realms.getRealmRefsCount(), is(3 + randomRealmTypesCount)); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("reserved", "reserved"), - buildRealmRef("reserved", "reserved", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", nodeName, realmsForDomain)) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("file", "default_file"), - buildRealmRef("default_file", "file", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", nodeName, realmsForDomain)) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("native", "default_native"), - buildRealmRef("default_native", "native", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", "native", nodeName, realmsForDomain)) ); IntStream.range(0, randomRealmTypesCount) .forEach( index -> assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index), - buildRealmRef("realm_" + index, "type_" + index, nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index)), + is(buildRealmRef("realm_" + index, "type_" + index, nodeName, realmsForDomain)) ) ); } @@ -614,20 +848,25 @@ public void testWithEmptySettings() throws Exception { assertThat(realms.getUnlicensedRealms(), sameInstance(realms.getUnlicensedRealms())); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); // reserved, file, native - assertThat(realmRefs.size(), equalTo(3)); - assertThat(realmRefs, hasEntry(new RealmConfig.RealmIdentifier("reserved", "reserved"), buildRealmRef("reserved", "reserved", ""))); - assertThat(realmRefs, hasEntry(new RealmConfig.RealmIdentifier("file", "default_file"), buildRealmRef("default_file", "file", ""))); + assertThat(realms.getRealmRefsCount(), is(3)); + assertThat( + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", "")) + ); + assertThat( + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", "")) + ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("native", "default_native"), buildRealmRef("default_native", "native", "")) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", 
"native", "")) ); } @@ -682,8 +921,12 @@ public void testRealmsAreDisabledOnLicenseDowngrade() throws Exception { final Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); assertThat(realms.getUnlicensedRealms(), empty()); assertThat(realms.getActiveRealms(), hasSize(9)); // 0..7 configured + reserved - final Map realmRefs = Map.copyOf(realms.getRealmRefs()); - assertThat(realmRefs, aMapWithSize(9)); + Map beforeRealmRefs = realms.getActiveRealms() + .stream() + .map(r -> new RealmConfig.RealmIdentifier(r.type(), r.name())) + .collect(Collectors.toMap(Function.identity(), realms::getRealmRef)); + assertThat(realms.getRealmRefsCount(), is(9)); + assertThat(beforeRealmRefs.size(), is(9)); verify(licenseState).enableUsageTracking(Security.KERBEROS_REALM_FEATURE, "kerberos_realm"); verify(licenseState).enableUsageTracking(Security.LDAP_REALM_FEATURE, "ldap_realm_1"); @@ -720,7 +963,8 @@ public void testRealmsAreDisabledOnLicenseDowngrade() throws Exception { assertThat(unlicensedRealmNames, containsInAnyOrder("kerberos_realm", "custom_realm_1", "custom_realm_2")); assertThat(realms.getActiveRealms(), hasSize(6)); // 9 - 3 // no change to realm refs - assertThat(realms.getRealmRefs(), equalTo(realmRefs)); + assertThat(realms.getRealmRefsCount(), is(beforeRealmRefs.size())); + beforeRealmRefs.forEach((ri, rr) -> assertThat(realms.getRealmRef(ri), is(rr))); verify(licenseState).disableUsageTracking(Security.KERBEROS_REALM_FEATURE, "kerberos_realm"); verify(licenseState).disableUsageTracking(Security.CUSTOM_REALMS_FEATURE, "custom_realm_1"); @@ -1060,7 +1304,7 @@ public void testNativeRealmNotAddedWhenNativeUsersDisabled() throws Exception { ); // We still want a ref to the native realm so that transport actions can reference it (but the realm is disabled) - assertThat(realms.getRealmRefs(), hasKey(realmId)); + assertThat(realms.getRealmRef(realmId), notNullValue()); } public void testNativeRealmNotAvailableWhenNativeUsersDisabled() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java index 19202bb109215..1eb7f672bc99c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java @@ -27,12 +27,7 @@ public final class DummyUsernamePasswordRealm extends UsernamePasswordRealm { public DummyUsernamePasswordRealm(RealmConfig config) { super(config); - initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(config.type(), config.name()), - new Authentication.RealmRef(config.name(), config.type(), Node.NODE_NAME_SETTING.get(config.settings())) - ) - ); + setRealmRef(new Authentication.RealmRef(config.name(), config.type(), Node.NODE_NAME_SETTING.get(config.settings()))); this.users = new HashMap<>(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java index 73bd70705120c..f3b2d65ad1b0c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java @@ -29,16 +29,15 @@ public class AuthorizationDenialMessagesTests extends ESTestCase { + private final AuthorizationDenialMessages.Default denialMessages = new AuthorizationDenialMessages.Default(); + public void testNoRolesDescriptionIfSubjectIsNotAUser() { final Authentication authentication = randomFrom( AuthenticationTestHelper.builder().apiKey().build(), AuthenticationTestHelper.builder().serviceAccount().build() ); - assertThat( - AuthorizationDenialMessages.rolesDescription(authentication.getEffectiveSubject(), mock(AuthorizationInfo.class)), - equalTo("") - ); + assertThat(denialMessages.rolesDescription(authentication.getEffectiveSubject(), mock(AuthorizationInfo.class)), equalTo("")); } public void testRolesDescriptionWithNullAuthorizationInfo() { @@ -51,7 +50,7 @@ public void testRolesDescriptionWithNullAuthorizationInfo() { .user(new User(randomAlphaOfLengthBetween(3, 8), assignedRoleNames.toArray(String[]::new))) .build(false) .getEffectiveSubject(); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, null); + final String rolesDescription = denialMessages.rolesDescription(subject, null); assertThat( rolesDescription, @@ -71,7 +70,7 @@ public void testRolesDescriptionWithNullRolesField() { .getEffectiveSubject(); final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of()); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -102,7 +101,7 @@ public void testRolesDescriptionWithIncompatibleRolesField() { ) ) ); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -123,7 +122,7 @@ public void testRoleDescriptionWithEmptyResolvedRole() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", Strings.EMPTY_ARRAY)); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); if (assignedRoleNames.isEmpty()) { assertThat(rolesDescription, equalTo(" with effective roles []")); @@ -160,7 +159,7 @@ public void testRoleDescriptionAllResolvedAndMaybeWithAnonymousRoles() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", effectiveRoleNames.toArray(String[]::new))); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -195,7 +194,7 @@ public void testRoleDescriptionWithUnresolvedRoles() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", effectiveRoleNames.toArray(String[]::new))); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = 
denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -220,13 +219,13 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); final String action = "indices:/some/action/" + randomAlphaOfLengthBetween(0, 8); assertThat( - AuthorizationDenialMessages.actionDenied(authentication, null, action, mock(), null), + denialMessages.actionDenied(authentication, null, action, mock(), null), equalTo( Strings.format( "action [%s] towards remote cluster is unauthorized for %s authenticated by API key id [%s] of user [%s], " + "this action is granted by the index privileges [all]", action, - AuthorizationDenialMessages.successfulAuthenticationDescription(innerAuthentication, null), + denialMessages.successfulAuthenticationDescription(innerAuthentication, null), authentication.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY), authentication.getEffectiveSubject().getUser().principal() ) @@ -237,7 +236,7 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { public void testSuccessfulAuthenticationDescription() { final Authentication authentication1 = AuthenticationTestHelper.builder().realm().build(false); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication1, null), + denialMessages.successfulAuthenticationDescription(authentication1, null), equalTo( Strings.format( "user [%s] with assigned roles [%s]", @@ -249,7 +248,7 @@ public void testSuccessfulAuthenticationDescription() { final Authentication authentication2 = AuthenticationTestHelper.builder().realm().runAs().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication2, null), + denialMessages.successfulAuthenticationDescription(authentication2, null), equalTo( Strings.format( "user [%s] run as [%s] with assigned roles [%s]", @@ -262,7 +261,7 @@ public void testSuccessfulAuthenticationDescription() { final Authentication authentication3 = AuthenticationTestHelper.builder().apiKey().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication3, null), + denialMessages.successfulAuthenticationDescription(authentication3, null), equalTo( Strings.format( "API key id [%s] of user [%s]", @@ -274,7 +273,7 @@ public void testSuccessfulAuthenticationDescription() { final Authentication authentication4 = AuthenticationTestHelper.builder().serviceAccount().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication4, null), + denialMessages.successfulAuthenticationDescription(authentication4, null), equalTo(Strings.format("service account [%s]", authentication4.getEffectiveSubject().getUser().principal())) ); @@ -286,11 +285,11 @@ public void testSuccessfulAuthenticationDescription() { .getMetadata() .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication5, null), + denialMessages.successfulAuthenticationDescription(authentication5, null), equalTo( Strings.format( "%s authenticated by API key id [%s] of user [%s]", - AuthorizationDenialMessages.successfulAuthenticationDescription(innerAuthentication, null), + denialMessages.successfulAuthenticationDescription(innerAuthentication, null), authentication5.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY), 
authentication5.getEffectiveSubject().getUser().principal() ) @@ -303,14 +302,14 @@ public void testRemoteActionDenied() { final String action = "indices:/some/action/" + randomAlphaOfLengthBetween(0, 8); final String clusterAlias = randomAlphaOfLengthBetween(5, 12); assertThat( - AuthorizationDenialMessages.remoteActionDenied(authentication, null, action, clusterAlias), + denialMessages.remoteActionDenied(authentication, null, action, clusterAlias), equalTo( Strings.format( "action [%s] towards remote cluster [%s] is unauthorized for %s" + " because no remote indices privileges apply for the target cluster", action, clusterAlias, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, null) + denialMessages.successfulAuthenticationDescription(authentication, null) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 8a0041ef2bb76..2cc6c7d569f44 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -321,7 +321,8 @@ public void setup() { licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); } @@ -1743,7 +1744,8 @@ public void testDenialForAnonymousUser() { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); RoleDescriptor role = new RoleDescriptor( @@ -1791,7 +1793,8 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); RoleDescriptor role = new RoleDescriptor( @@ -3307,7 +3310,8 @@ public void testAuthorizationEngineSelectionForCheckPrivileges() throws Exceptio licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); Subject subject = new Subject(new User("test", "a role"), mock(RealmRef.class)); @@ -3462,7 +3466,8 @@ public void getUserPrivileges(AuthorizationInfo authorizationInfo, ActionListene licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); Authentication authentication; try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -3561,7 +3566,7 @@ public void testRemoteActionDenied() { + " because no remote indices privileges apply for the target cluster", action, clusterAlias, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, authorizationInfo) + new AuthorizationDenialMessages.Default().successfulAuthenticationDescription(authentication, authorizationInfo) ) ) ); @@ -3583,7 +3588,7 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { Strings.format( "action [%s] towards remote cluster is unauthorized for %s", action, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, 
authorizationInfo) + new AuthorizationDenialMessages.Default().successfulAuthenticationDescription(authentication, authorizationInfo) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java index 22c63a4e27378..8a7602627b714 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.common.logging.Loggers; @@ -32,13 +33,16 @@ import org.junit.Before; import org.mockito.Mockito; +import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.xpack.security.operator.OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; @@ -278,8 +282,13 @@ public void testCheckRest() { ThreadContext threadContext = new ThreadContext(settings); // not an operator - when(operatorOnlyRegistry.checkRest(restHandler, restRequest, restChannel)).thenReturn(() -> "violation!"); - assertFalse(operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext)); + doThrow(new ElasticsearchSecurityException("violation!")).when(operatorOnlyRegistry).checkRest(restHandler, restRequest); + final ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + () -> operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext) + ); + assertThat(ex, instanceOf(ElasticsearchSecurityException.class)); + assertThat(ex, throwableWithMessage("violation!")); Mockito.clearInvocations(operatorOnlyRegistry); // is an operator diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index 76a01f100b8ad..16afd3716b113 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -44,7 +44,8 @@ import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; -import static org.hamcrest.Matchers.arrayContaining; +import static 
org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -144,7 +145,7 @@ public void doE || getApiKeyRequest.getRealmName() != null && getApiKeyRequest.getRealmName().equals("realm-1") || getApiKeyRequest.getUserName() != null && getApiKeyRequest.getUserName().equals("user-x")) { if (replyEmptyResponse) { - listener.onResponse((Response) GetApiKeyResponse.emptyResponse()); + listener.onResponse((Response) GetApiKeyResponse.EMPTY); } else { listener.onResponse((Response) getApiKeyResponseExpected); } @@ -162,25 +163,27 @@ public void doE assertThat(restResponse.status(), (replyEmptyResponse && params.get("id") != null) ? is(RestStatus.NOT_FOUND) : is(RestStatus.OK)); final GetApiKeyResponse actual = GetApiKeyResponse.fromXContent(createParser(XContentType.JSON.xContent(), restResponse.content())); if (replyEmptyResponse) { - assertThat(actual.getApiKeyInfos().length, is(0)); + assertThat(actual.getApiKeyInfoList(), emptyIterable()); } else { assertThat( - actual.getApiKeyInfos(), - arrayContaining( - new ApiKey( - "api-key-name-1", - "api-key-id-1", - type, - creation, - expiration, - false, - null, - "user-x", - "realm-1", - "realm-type-1", - metadata, - roleDescriptors, - limitedByRoleDescriptors + actual.getApiKeyInfoList(), + contains( + new GetApiKeyResponse.Item( + new ApiKey( + "api-key-name-1", + "api-key-id-1", + type, + creation, + expiration, + false, + null, + "user-x", + "realm-1", + "realm-type-1", + metadata, + roleDescriptors, + limitedByRoleDescriptors + ) ) ) ); @@ -286,11 +289,9 @@ public void doE assertThat(restResponse.status(), is(RestStatus.OK)); final GetApiKeyResponse actual = GetApiKeyResponse.fromXContent(createParser(XContentType.JSON.xContent(), restResponse.content())); if (isGetRequestForOwnedKeysOnly) { - assertThat(actual.getApiKeyInfos().length, is(1)); - assertThat(actual.getApiKeyInfos(), arrayContaining(apiKey1)); + assertThat(actual.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), contains(apiKey1)); } else { - assertThat(actual.getApiKeyInfos().length, is(2)); - assertThat(actual.getApiKeyInfos(), arrayContaining(apiKey1, apiKey2)); + assertThat(actual.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), contains(apiKey1, apiKey2)); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 2bb5db03f210a..1373be0b372b6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -126,7 +126,7 @@ public void doE final QueryBuilder shouldQueryBuilder = boolQueryBuilder.should().get(0); assertThat(shouldQueryBuilder.getClass(), is(PrefixQueryBuilder.class)); assertThat(((PrefixQueryBuilder) shouldQueryBuilder).fieldName(), equalTo("metadata.environ")); - listener.onResponse((Response) QueryApiKeyResponse.emptyResponse()); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); } }; final RestQueryApiKeyAction restQueryApiKeyAction = new RestQueryApiKeyAction(Settings.EMPTY, mockLicenseState); @@ -190,7 +190,7 @@ public void doE equalTo(new SearchAfterBuilder().setSortValues(new 
String[] { "key-2048", "2021-07-01T00:00:59.000Z" })) ); - listener.onResponse((Response) QueryApiKeyResponse.emptyResponse()); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); } }; diff --git a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java index c004eaf58939b..b249e12946159 100644 --- a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java +++ b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -249,7 +248,6 @@ public void testShardsMoveOffRemovingNode() throws Exception { putNodeShutdown(nodeIdToShutdown, "REMOVE"); // assertBusy waiting for the shard to no longer be on that node - AtomicReference> debug = new AtomicReference<>(); assertBusy(() -> { List shardsResponse = entityAsList(client().performRequest(checkShardsRequest)); final long shardsOnNodeToShutDown = shardsResponse.stream() @@ -258,16 +256,17 @@ public void testShardsMoveOffRemovingNode() throws Exception { .filter(shard -> "STARTED".equals(shard.get("state")) || "RELOCATING".equals(shard.get("state"))) .count(); assertThat(shardsOnNodeToShutDown, is(0L)); - debug.set(shardsResponse); }); - // Now check the shard migration status - Request getStatusRequest = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown"); - Response statusResponse = client().performRequest(getStatusRequest); - Map status = entityAsMap(statusResponse); - assertThat(ObjectPath.eval("nodes.0.shard_migration.status", status), equalTo("COMPLETE")); - assertThat(ObjectPath.eval("nodes.0.shard_migration.shard_migrations_remaining", status), equalTo(0)); - assertThat(ObjectPath.eval("nodes.0.shard_migration.explanation", status), nullValue()); + assertBusy(() -> { + // Now check the shard migration status + Request getStatusRequest = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown"); + Response statusResponse = client().performRequest(getStatusRequest); + Map status = entityAsMap(statusResponse); + assertThat(ObjectPath.eval("nodes.0.shard_migration.status", status), equalTo("COMPLETE")); + assertThat(ObjectPath.eval("nodes.0.shard_migration.shard_migrations_remaining", status), equalTo(0)); + assertThat(ObjectPath.eval("nodes.0.shard_migration.explanation", status), nullValue()); + }); } public void testShardsCanBeAllocatedAfterShutdownDeleted() throws Exception { @@ -292,7 +291,6 @@ public void testShardsCanBeAllocatedAfterShutdownDeleted() throws Exception { ensureGreen(indexName); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/77456") public void testStalledShardMigrationProperlyDetected() throws Exception { String nodeIdToShutdown = getRandomNodeId(); int numberOfShards = randomIntBetween(1, 5); diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index e8d06e6f8cbe2..fad05e6f213d5 100644 --- 
a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.shutdown; -import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -51,7 +50,6 @@ protected Collection> nodePlugins() { * reverting to `NOT_STARTED` (this was a bug in the initial implementation). */ public void testShardStatusStaysCompleteAfterNodeLeaves() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToRestartName = internalCluster().startNode(); final String nodeToRestartId = getNodeId(nodeToRestartName); internalCluster().startNode(); @@ -70,9 +68,7 @@ public void testShardStatusStaysCompleteAfterNodeLeaves() throws Exception { * Similar to the previous test, but ensures that the status stays at `COMPLETE` when the node is offline when the shutdown is * registered. This may happen if {@link NodeSeenService} isn't working as expected. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76689") public void testShardStatusStaysCompleteAfterNodeLeavesIfRegisteredWhileNodeOffline() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToRestartName = internalCluster().startNode(); final String nodeToRestartId = getNodeId(nodeToRestartName); internalCluster().startNode(); @@ -92,7 +88,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { NodesInfoResponse nodes = clusterAdmin().prepareNodesInfo().clear().get(); assertThat(nodes.getNodes().size(), equalTo(1)); - assertNodeShutdownStatus(nodeToRestartId, COMPLETE); + assertBusy(() -> { assertNodeShutdownStatus(nodeToRestartId, COMPLETE); }); } /** @@ -100,7 +96,6 @@ public Settings onNodeStopped(String nodeName) throws Exception { * (this was a bug in the initial implementation). 
*/ public void testShardStatusIsCompleteOnNonDataNodes() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToShutDownName = internalCluster().startMasterOnlyNode(); internalCluster().startMasterOnlyNode(); // Just to have at least one other node final String nodeToRestartId = getNodeId(nodeToShutDownName); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java index 554b617774db3..83e72d4146640 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java @@ -101,6 +101,7 @@ public ClusterState execute(BatchExecutionContext batc var nodesNotPreviouslySeen = new HashSet<>(); for (final var taskContext : batchExecutionContext.taskContexts()) { nodesNotPreviouslySeen.addAll(taskContext.getTask().nodesNotPreviouslySeen()); + taskContext.success(() -> {}); } var nodes = initialState.nodes(); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java index 729cb8ef47292..f54cd4d4977d7 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -194,7 +195,9 @@ public void testCreateSnapshotOnTrigger() { assertThat(req.includeGlobalState(), equalTo(globalState)); try { - return CreateSnapshotResponse.fromXContent(createParser(JsonXContent.jsonXContent, createSnapResponse)); + return SnapshotInfoUtils.createSnapshotResponseFromXContent( + createParser(JsonXContent.jsonXContent, createSnapResponse) + ); } catch (IOException e) { fail("failed to parse snapshot response"); return null; diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index 1465911490f61..8951b91cb76a3 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -986,7 +986,11 @@ public void testRecoveryConcurrentlyWithIndexing() throws Exception { if (waitForSnapshotDownloadToStart) { // must complete using snapshots alone. 
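        // Illustrative aside (not part of this patch): the shutdown tests above now wrap their status
        // checks in assertBusy because shard-migration status is reported asynchronously, after the shards
        // themselves have already moved. A minimal sketch of that polling pattern, reusing the request path
        // and ObjectPath keys from NodeShutdownIT above; nodeIdToShutdown and the REST client helpers are
        // assumed to be in scope as they are in that test:
        assertBusy(() -> {
            Request getStatusRequest = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown");
            Map<String, Object> status = entityAsMap(client().performRequest(getStatusRequest));
            assertThat(ObjectPath.eval("nodes.0.shard_migration.status", status), equalTo("COMPLETE"));
        });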
RecoveryState recoveryState = getLatestPeerRecoveryStateForShard(indexName, 0); - assertThat(recoveryState.getIndex().recoveredFromSnapshotBytes(), equalTo(snapshotSizeForIndex)); + assertThat( + "Index " + recoveryState.getIndex() + " should be completely recovered from the snapshot", + recoveryState.getIndex().recoveredFromSnapshotBytes(), + equalTo(snapshotSizeForIndex) + ); } assertDocumentsAreEqual(indexName, numDocs.get()); diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 90a6f4ada32e0..3fbb55ca4eb3a 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -5,29 +5,19 @@ * 2.0. */ - -import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.ports.ReservedPortRange - -import java.nio.file.Files -import java.nio.file.Paths - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -apply plugin: 'elasticsearch.internal-available-ports' - -final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture') -final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture') -final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) - javaRestTestImplementation project(':plugins:repository-hdfs') + javaRestTestImplementation project(path: ':test:fixtures:hdfs-fixture', configuration:"shadow") + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + javaRestTestImplementation "org.slf4j:slf4j-simple:${versions.slf4j}" + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" } restResources { @@ -36,151 +26,15 @@ restResources { } } -testFixtures.useFixture(krbFixtureProject.path, 'hdfs-snapshot-repo-tests') - -configurations { - hdfsFixture -} - dependencies { - hdfsFixture hdfsFixtureProject - // Set the keytab files in the classpath so that we can access them from test code without the security manager freaking out. 
- if (isEclipse == false) { - testRuntimeOnly files(krbFixtureProject.ext.krb5Keytabs("hdfs-snapshot-repo-tests", "hdfs_hdfs.build.elastic.co.keytab").parent){ - builtBy ":test:fixtures:krb5kdc-fixture:preProcessFixture" - } - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } -} - -String realm = "BUILD.ELASTIC.CO" -String krb5conf = krbFixtureProject.ext.krb5Conf("hdfs") - -// Create HDFS File System Testing Fixtures -for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - dependsOn project.configurations.hdfsFixture, krbFixtureProject.tasks.postProcessFixture - executable = "${BuildParams.runtimeJavaHome}/bin/java" - env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.equals('secureHdfsFixture')) { - onlyIf("Only runtime java version < 16") { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 } - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}") - } - // configure port dynamically - def portRange = project.getExtensions().getByType(ReservedPortRange) - miniHDFSArgs.add("-Dhdfs.config.port=${portRange.getOrAllocate(name)}") - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - - // If it's a secure fixture, then set the principal name and keytab locations to use for auth. - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")) - } - - args miniHDFSArgs.toArray() - } + clusterPlugins project(':plugins:repository-hdfs') } // Disable integration test if Fips mode tasks.named("javaRestTest").configure { + usesDefaultDistribution() description = "Runs rest tests against an elasticsearch cluster with HDFS." - def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("hdfsFixture") - systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/simple' BuildParams.withFipsEnabledOnly(it) -} - -tasks.register("javaRestTestSecure", RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with Secured HDFS." 
- def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("secureHdfsFixture") - nonInputProperties.systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/secure' - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") - ) - onlyIf("FIPS mode disabled and runtime java < 16") { - BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 - } - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - classpath = sourceSets.javaRestTest.runtimeClasspath -} - -tasks.named("check").configure { dependsOn("javaRestTestSecure") } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - plugin(hdfsRepoPluginProject.path) - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'false' -} - -testClusters.matching { it.name == "javaRestTestSecure" }.configureEach { - systemProperty "java.security.krb5.conf", krb5conf - extraConfigFile( - "repository-hdfs/krb5.keytab", - file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE - ) -} - -// Determine HDFS Fixture compatibility for the current build environment. -boolean fixtureSupported = false -if (OS.current() == OS.WINDOWS) { - // hdfs fixture will not start without hadoop native libraries on windows - String nativePath = System.getenv("HADOOP_HOME") - if (nativePath != null) { - java.nio.file.Path path = Paths.get(nativePath) - if (Files.isDirectory(path) && - Files.exists(path.resolve("bin").resolve("winutils.exe")) && - Files.exists(path.resolve("bin").resolve("hadoop.dll")) && - Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { - fixtureSupported = true - } else { - throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin") - } - } -} else { - fixtureSupported = true -} - -boolean legalPath = rootProject.rootDir.toString().contains(" ") == false -if (legalPath == false) { - fixtureSupported = false -} - -if (fixtureSupported) { - tasks.named("javaRestTest").configure {dependsOn "hdfsFixture" } - tasks.named("javaRestTestSecure").configure {dependsOn "secureHdfsFixture" } -} else { - tasks.named("javaRestTest").configure {enabled = false } - tasks.named("javaRestTestSecure").configure { enabled = false } - if (legalPath) { - logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") - } else { - logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'") - } + // required for krb5kdc-fixture to work + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java new file mode 100644 index 0000000000000..2810c4801e8dd --- /dev/null +++ 
b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import org.elasticsearch.common.settings.Settings; + +import static org.hamcrest.Matchers.blankOrNullString; +import static org.hamcrest.Matchers.not; + +public abstract class AbstractHdfsSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { + + @Override + protected String repositoryType() { + return "hdfs"; + } + + @Override + protected Settings repositorySettings() { + final String uri = "hdfs://localhost:" + getHdfsPort(); + // final String uri = System.getProperty("test.hdfs.uri"); + assertThat(uri, not(blankOrNullString())); + + final String path = getRepositoryPath(); + assertThat(path, not(blankOrNullString())); + Settings.Builder repositorySettings = Settings.builder().put("client", "repository_test_kit").put("uri", uri).put("path", path); + return repositorySettings.build(); + } + + protected abstract String getRepositoryPath(); + + protected abstract int getHdfsPort(); + +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java index 2cc81567e94bf..e9787ecdce854 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java @@ -6,33 +6,43 @@ */ package org.elasticsearch.repositories.blobstore.testkit; -import org.elasticsearch.common.settings.Settings; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -public class HdfsSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) +public class HdfsSnapshotRepoTestKitIT extends AbstractHdfsSnapshotRepoTestKitIT { + + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); @Override - protected String repositoryType() { - return "hdfs"; + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); 
} @Override - protected Settings repositorySettings() { - final String uri = System.getProperty("test.hdfs.uri"); - assertThat(uri, not(blankOrNullString())); - - final String path = System.getProperty("test.hdfs.path"); - assertThat(path, not(blankOrNullString())); - - // Optional based on type of test - final String principal = System.getProperty("test.krb5.principal.es"); + protected String getRepositoryPath() { + return "/user/elasticsearch/test/repository_test_kit/simple"; + } - Settings.Builder repositorySettings = Settings.builder().put("client", "repository_test_kit").put("uri", uri).put("path", path); - if (principal != null) { - repositorySettings.put("security.principal", principal); - } - return repositorySettings.build(); + @Override + protected int getHdfsPort() { + return hdfsFixture.getPort(); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java new file mode 100644 index 0000000000000..6d599e41e3b9f --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class SecureHdfsSnapshotRepoTestKitIT extends AbstractHdfsSnapshotRepoTestKitIT { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + 
return cluster.getHttpAddresses(); + } + + @Override + protected int getHdfsPort() { + return hdfsFixture.getPort(); + } + + @Override + protected String getRepositoryPath() { + return "/user/elasticsearch/test/repository_test_kit/secure"; + } + + @Override + protected Settings repositorySettings() { + return Settings.builder().put(super.repositorySettings()).put("security.principal", "elasticsearch@BUILD.ELASTIC.CO").build(); + } +} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java index 18020bd44ca6e..c178b20530f0c 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java @@ -22,7 +22,7 @@ abstract class AbstractGeoShapeScriptFieldQuery extends AbstractScriptFieldQuery } @Override - protected boolean matches(GeometryFieldScript scriptContext, int docId) { + protected final boolean matches(GeometryFieldScript scriptContext, int docId) { scriptContext.runForDoc(docId); return matches(scriptContext.geometry()); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java index 331bfbf8cd305..592cb65800b71 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java @@ -99,8 +99,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -128,6 +128,7 @@ public void collect(int doc) throws IOException { }; } }); + assertEquals(2, results.size()); } } } @@ -141,7 +142,7 @@ public void testSort() throws IOException { public void testFetch() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef(""" + addDocument(iw, List.of(new StoredField("_source", new BytesRef(""" {"foo": {"coordinates": [[45.0, 45.0], [0.0, 0.0]], "type" : "LineString"}}""")))); try (DirectoryReader reader = iw.getReader()) { SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -162,8 +163,8 @@ public void testFetch() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = 
newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -196,8 +197,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(2)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml index 09f08d59049ec..095f5c043457e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml @@ -21,7 +21,7 @@ setup: ], "applications": [ { - "application": "myapp", + "application": "kibana-.kibana", "privileges": ["*"], "resources": ["*"] } @@ -43,14 +43,14 @@ setup: security.put_privileges: body: > { - "myapp": { + "kibana-.kibana": { "read": { - "application": "myapp", + "application": "kibana-.kibana", "name": "read", "actions": [ "data:read/*" ] }, "write": { - "application": "myapp", + "application": "kibana-.kibana", "name": "write", "actions": [ "data:write/*" ] } @@ -71,7 +71,7 @@ teardown: - do: security.delete_privileges: - application: myapp + application: "kibana-.kibana" name: "read,write" ignore: 404 @@ -301,7 +301,7 @@ teardown: ], "applications": [ { - "application": "myapp", + "application": "kibana-.kibana", "privileges": ["read"], "resources": ["*"] } @@ -344,7 +344,7 @@ teardown: ], "application": [ { - "application" : "myapp", + "application" : "kibana-.kibana", "resources" : [ "*", "some-other-res" ], "privileges" : [ "data:read/me", "data:write/me" ] } @@ -369,7 +369,7 @@ teardown: } } } - match: { "application" : { - "myapp" : { + "kibana-.kibana" : { "*" : { "data:read/me" : true, "data:write/me" : false diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 44d7290cbc002..c72315312afce 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -193,3 +193,64 @@ - match: { columns.0.type: double } - length: { values: 1 } - match: { values.0.0: 30.0 } + + + +--- +"text in functions #105379": + - skip: + version: " - 8.13.99" + reason: "fixes in 8.13 or later" + - do: + indices.create: + index: idx_with_date_ip_txt + body: + mappings: + properties: + id: + type: long + date: + type: date + ip: + type: ip + text: + type: text + text2: + type: text + + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 1, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text":"yyyy-MM-dd", "text2":"year" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 2, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text": "192.168.0.0/16" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 3, "date": "2024-03-22T14:50:00.000Z", "ip": "10.0.0.10", "text": "192.168.0.0/16" } + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' + - match: { columns.0.name: x } + - match: { columns.0.type: keyword } + - match: { columns.1.name: y } + - match: { columns.1.type: long } + - length: { values: 1 } + - match: { values.0.0: "2024-03-22" } + - match: { values.0.1: 2024 } + - match: { values.0.2: "2024-03-14T00:00:00.000Z" } + + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' + - match: { columns.0.name: id } + - match: { columns.0.type: long } + - match: { columns.1.name: x } + - match: { columns.1.type: boolean } + - length: { values: 2 } + - match: { values.0.0: 2 } + - match: { values.0.1: true } + - match: { values.1.0: 3 } + - match: { values.1.1: false } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml index 4727a5394cf3d..8a9004d372d17 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml @@ -131,6 +131,14 @@ geo_point unsortable with limit from row: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) | limit 5 | sort pt' +--- +values unsupported for geo_point: + - do: + catch: '/.+argument of \[VALUES\(location\)\] must be \[boolean, date, double, integer, ip, keyword, long, null, text, unsigned_long or version\].+/' + esql.query: + body: + query: 'FROM geo_points | STATS VALUES(location)' + --- cartesian_point: - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 2c3c919f2b37d..b294629dda073 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -6,7 +6,7 @@ setup: features: allowed_warnings_regex - do: indices.create: - index: cities + index: cities body: settings: number_of_shards: 5 @@ -15,13 +15,13 @@ setup: city_code: type: keyword city: - type: keyword + type: text country: type: keyword - do: bulk: - index: "cities" + index: cities refresh: true 
body: - { "index": { } } @@ -31,16 +31,29 @@ setup: - do: enrich.put_policy: - name: cities_policy + name: city_codes_policy body: match: - indices: ["cities"] + indices: [ "cities" ] match_field: "city_code" - enrich_fields: ["city", "country"] + enrich_fields: [ "city", "country" ] + + - do: + enrich.put_policy: + name: city_names_policy + body: + match: + indices: [ "cities" ] + match_field: "city" + enrich_fields: [ "city_code", "country" ] + + - do: + enrich.execute_policy: + name: city_codes_policy - do: enrich.execute_policy: - name: cities_policy + name: city_names_policy - do: indices.create: @@ -52,39 +65,49 @@ setup: type: keyword city_id: type: keyword + city_name: + type: text - do: bulk: - index: "test" + index: test refresh: true body: - { "index": { } } - - { "name": "Alice", "city_id": "nyc" } + - { "name": "Alice", "city_id": "nyc", "city_name": "New York" } - { "index": { } } - - { "name": "Bob", "city_id": "nyc" } + - { "name": "Bob", "city_id": "nyc", "city_name": "New York" } - { "index": { } } - - { "name": "Mario", "city_id": "rom" } + - { "name": "Mario", "city_id": "rom", "city_name": "Rome" } - { "index": { } } - - { "name": "Denise", "city_id": "sgn" } + - { "name": "Denise", "city_id": "sgn", "city_name": "Tan Son Nhat" } --- teardown: - do: enrich.delete_policy: - name: cities_policy + name: city_codes_policy + - do: + enrich.delete_policy: + name: city_names_policy --- -"Basic": +"Enrich on keyword": + - skip: + version: " - 8.12.99" + reason: "Enriching with text field in the enrich fields list involves reading text from source, not supported before 8.13.0" + features: allowed_warnings_regex + - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | enrich cities_policy on city_id | keep name, city, country | sort name' + query: 'from test | enrich city_codes_policy on city_id | keep name, city, country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } - match: { columns.1.name: "city" } - - match: { columns.1.type: "keyword" } + - match: { columns.1.type: "text" } - match: { columns.2.name: "country" } - match: { columns.2.type: "keyword" } @@ -95,12 +118,14 @@ teardown: - match: { values.3: [ "Mario", "Rome", "Italy" ] } +--- +"Enrich on keyword with fields": - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | keep name, city_id | enrich cities_policy on city_id with country | sort name' + query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -116,12 +141,14 @@ teardown: - match: { values.3: [ "Mario", "rom", "Italy" ] } +--- +"Enrich on keyword with fields alias": - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | keep name, city_id | enrich cities_policy on city_id with country_name = country | sort name' + query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country_name = country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -135,3 +162,32 @@ teardown: - match: { values.1: [ "Bob", "nyc", "USA" ] } - match: { values.2: [ "Denise", "sgn", null ] } - match: { values.3: [ "Mario", "rom", "Italy" ] } + + +--- +"Enrich on text": + - skip: + version: " - 8.13.99" + reason: "TEXT field ENRICH support was added in 
8.14.0" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | keep name, city_name | enrich city_names_policy on city_name | sort name' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: { columns.1.name: "city_name" } + - match: { columns.1.type: "text" } + - match: { columns.2.name: "city_code" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "country" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "Alice", "New York", "nyc", "USA" ] } + - match: { values.1: [ "Bob", "New York", "nyc", "USA" ] } + - match: { values.2: [ "Denise", "Tan Son Nhat", null, null ] } + - match: { values.3: [ "Mario", "Rome", "rom", "Italy" ] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 0e2838c976799..67dc4bbf2a06a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 8.11.99" - reason: "Latest, more complete, telemetry available in 8.12+" + version: " - 8.13.99" + reason: "Introduction of META tracking in 8.14+" - do: indices.create: @@ -23,7 +23,7 @@ setup: - do: {xpack.usage: {}} - match: { esql.available: true } - match: { esql.enabled: true } - - length: { esql.features: 15 } + - length: { esql.features: 16 } - set: {esql.features.dissect: dissect_counter} - set: {esql.features.drop: drop_counter} - set: {esql.features.eval: eval_counter} @@ -32,6 +32,7 @@ setup: - set: {esql.features.grok: grok_counter} - set: {esql.features.keep: keep_counter} - set: {esql.features.limit: limit_counter} + - set: {esql.features.meta: meta_counter} - set: {esql.features.mv_expand: mv_expand_counter} - set: {esql.features.rename: rename_counter} - set: {esql.features.row: row_counter} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 89e3c31bd475a..77a6811b899f6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -74,6 +74,33 @@ setup: - { "index": { } } - { "@timestamp": "2023-06-24", "ip": "13.101.0.114", "message": "authentication failed" } + - do: + indices.create: + index: events_text + body: + mappings: + properties: + "@timestamp": + type: date + ip_text: + type: text + message: + type: keyword + + - do: + bulk: + index: events_text + refresh: true + body: + - { "index": { } } + - { "@timestamp": "2023-06-20", "ip_text": "10.100.0.21", "message": "network connected" } + - { "index": { } } + - { "@timestamp": "2023-06-21", "ip_text": [ "10.100.0.21", "10.101.0.107" ], "message": "sending messages" } + - { "index": { } } + - { "@timestamp": "2023-06-22", "ip_text": "10.101.0.107", "message": "network disconnected" } + - { "index": { } } + - { "@timestamp": "2023-06-24", "ip_text": "13.101.0.114", "message": "authentication failed" } + --- teardown: - do: @@ -104,8 +131,40 @@ teardown: - match: { values.2: [ "10.101.0.107" , "QA", "Engineering", "network disconnected" ] } - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } +--- +"IP text 
fields": + - skip: + version: " - 8.13.99" + reason: "ENRICH support for TEXT fields was added in 8.14.0" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM events_text | ENRICH networks-policy ON ip_text | sort @timestamp | KEEP ip_text, name, department, message' + + - match: { columns.0.name: "ip_text" } + - match: { columns.0.type: "text" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "department" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "message" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "10.100.0.21", "Production", "OPS", "network connected" ] } + - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], [ "Production", "QA" ], [ "OPS","Engineering" ], "sending messages" ] } + - match: { values.2: [ "10.101.0.107" , "QA", "Engineering", "network disconnected" ] } + - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } + --- "Invalid IP strings": + - skip: + version: " - 8.13.99" + reason: "IP range ENRICH support was added in 8.14.0" + - do: catch: /'invalid_[\d\.]+' is not an IP string literal/ esql.query: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index cef7f88506de8..fba68760a162f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -4,6 +4,7 @@ setup: version: " - 8.10.99" reason: "ESQL is available in 8.11+" features: allowed_warnings_regex + - do: indices.create: index: test @@ -120,6 +121,82 @@ setup: - length: { values: 1 } - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } +--- +"IN on text": + - skip: + version: " - 8.13.99" + reason: "IN on text fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag IN ("abc", "baz") | keep emp_no, name, job, tag' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + +--- +"IN on text and itself": + - skip: + version: " - 8.13.99" + reason: "IN on text fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag IN ("abc", tag) | keep emp_no, name, job, tag | sort emp_no' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 2 } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + +--- +"NOT IN on text": + - skip: + version: " - 8.13.99" + reason: "IN on text 
fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag NOT IN ("abc", "baz") | keep emp_no, name, job, tag' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + --- "eval and filter text": - do: @@ -289,6 +366,26 @@ setup: - match: { values.0: [ "Jenny - IT Director"] } - match: { values.1: [ "John - Payroll Specialist"] } +--- +"split text": + - skip: + version: " - 8.13.99" + reason: "functions fixed for text in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' + + - match: { columns.0.name: "split" } + - match: { columns.0.type: "keyword" } + + - length: { values: 2 } + - match: { values.0: [ ["foo", "bar"] ] } + - match: { values.1: [ "baz"] } + --- "stats text with raw": @@ -441,3 +538,38 @@ setup: - length: { values: 2 } - match: { values.0.0: "IT Director" } - match: { values.1.0: "Payroll Specialist" } + +--- +values: + - requires: + cluster_features: esql.value_agg + reason: "values is available in 8.14+" + + - do: + esql.query: + body: + query: 'FROM test | STATS job = VALUES(job) | LIMIT 1' + - match: { columns.0.name: "job" } + - match: { columns.0.type: "text" } + - length: { values: 1 } + - match: { values.0: [ [ "IT Director", "Payroll Specialist" ] ] } + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "emp_no": 30, "name": "Stuff", "job": "Other", "tag": "baz" } + + - do: + esql.query: + body: + query: 'FROM test | STATS job = VALUES(job) BY tag | SORT tag | LIMIT 10' + - match: { columns.0.name: "job" } + - match: { columns.0.type: "text" } + - match: { columns.1.name: "tag" } + - match: { columns.1.type: "text" } + - length: { values: 2 } + - match: { values.0: [ [ "Payroll Specialist", "Other" ], "baz" ] } + - match: { values.1: [ "IT Director", "foo bar" ] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml index 3c4439444d1a1..24e869781f677 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -1130,7 +1130,7 @@ "Test cannot create job with model snapshot id set": - do: - catch: /illegal_argument_exception/ + catch: /x_content_parse_exception/ ml.put_job: job_id: has-model-snapshot-id body: > diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml index 97e5146e9af86..869bba90345c4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml @@ -75,17 +75,17 @@ setup: index: index-with-embedded-text refresh: true body: | - 
{"index": {}} + {"index": {"_id": "0"}} {"source_text": "the octopus comforter smells", "embedding":[0.3925197124481201, 0.9145996570587158, 0.01372915506362915, 0.9987854957580566, 0.3240084648132324, 0.6188188195228577, 0.926924467086792, 0.12143599987030029, 0.175662100315094, 0.16076070070266724, 0.7671306133270264, 0.9518267512321472, 0.4557478427886963, 0.5410670638084412, 0.7594802975654602, 0.5035953521728516, 0.4115469455718994, 0.038427770137786865, 0.5419668555259705, 0.6362232565879822, 0.17554593086242676, 0.01821446418762207, 0.2931918501853943, 0.294437050819397, 0.6901726722717285, 0.1679999828338623, 0.7995195984840393, 0.8781598210334778, 0.18507736921310425, 0.8614458441734314, 0.690071702003479, 0.7859554886817932, 0.803643524646759, 0.0048452019691467285, 0.19700628519058228, 0.22210919857025146, 0.7043975591659546, 0.6320799589157104, 0.542057991027832, 0.8704766035079956, 0.32195907831192017, 0.7272325158119202, 0.4066658020019531, 0.89588862657547, 0.7947880029678345, 0.06543421745300293, 0.2873639464378357, 0.8773637413978577, 0.36480581760406494, 0.692948043346405, 0.19171112775802612, 0.14275449514389038, 0.17054951190948486, 0.8969640135765076, 0.39838290214538574, 0.26756417751312256, 0.5369327664375305, 0.4736328721046448, 0.21181154251098633, 0.2695402503013611, 0.8651300072669983, 0.8051849603652954, 0.7073134779930115, 0.5963589549064636, 0.09601861238479614, 0.5362404584884644, 0.23020631074905396, 0.8515381813049316, 0.5730932354927063, 0.7235705256462097, 0.08228331804275513, 0.5840849280357361, 0.6030013561248779, 0.2084050178527832, 0.7312950491905212, 0.6159517168998718, 0.6482340693473816, 0.07220339775085449, 0.5136227607727051, 0.9152160286903381, 0.8169018030166626, 0.15515869855880737, 0.7978536486625671, 0.564482569694519, 0.4757157564163208, 0.2718064785003662, 0.6910138726234436, 0.5675734877586365, 0.702862024307251, 0.19079893827438354, 0.8995556235313416, 0.4988499879837036, 0.6378234028816223, 0.2683940529823303, 0.21990180015563965, 0.8442690372467041, 0.8502047061920166, 0.9857811331748962, 0.3549607992172241, 0.7605474591255188]} - {"index": {}} + {"index": {"_id": "1"}} {"source_text": "the machine is leaking", "embedding":[0.09775793552398682, 0.9594467282295227, 0.7915146946907043, 0.9140479564666748, 0.5148435235023499, 0.8556410670280457, 0.6022665500640869, 0.05222177505493164, 0.9821935296058655, 0.49276530742645264, 0.23147249221801758, 0.2428399920463562, 0.3865380883216858, 0.5778483748435974, 0.5600519776344299, 0.9427472352981567, 0.48832541704177856, 0.8807493448257446, 0.32909590005874634, 0.8452557325363159, 0.811530590057373, 0.13344216346740723, 0.15256845951080322, 0.5025331974029541, 0.4288772940635681, 0.6590417623519897, 0.9282752871513367, 0.8842046856880188, 0.7873250842094421, 0.356731653213501, 0.9959152936935425, 0.07572609186172485, 0.5062583088874817, 0.36245888471603394, 0.6189196705818176, 0.7766605019569397, 0.5198523998260498, 0.7379586100578308, 0.0553441047668457, 0.5035901665687561, 0.24139636754989624, 0.10798943042755127, 0.272808313369751, 0.38171595335006714, 0.24275553226470947, 0.956981897354126, 0.8182021379470825, 0.9383817315101624, 0.06551980972290039, 0.6892690658569336, 0.7068917751312256, 0.5184322595596313, 0.6103079319000244, 0.7020677328109741, 0.7181660532951355, 0.6477184295654297, 0.26282840967178345, 0.9316624402999878, 0.8318467140197754, 0.1487215757369995, 0.39937925338745117, 0.6842989921569824, 0.3496543765068054, 0.6008991003036499, 0.9530165791511536, 0.4209877848625183, 
0.5675879716873169, 0.7883706092834473, 0.9547191858291626, 0.6292906403541565, 0.49566715955734253, 0.6907342672348022, 0.0834314227104187, 0.19785481691360474, 0.4896165728569031, 0.8460168838500977, 0.9680339097976685, 0.43386441469192505, 0.7068926095962524, 0.19123870134353638, 0.5661664009094238, 0.610595166683197, 0.23599380254745483, 0.2831611633300781, 0.7919651865959167, 0.0018386244773864746, 0.15559959411621094, 0.4622604250907898, 0.02038663625717163, 0.42241227626800537, 0.4200526475906372, 0.1223069429397583, 0.7035380005836487, 0.09902423620223999, 0.7804107666015625, 0.05339455604553223, 0.6485095024108887, 0.29347676038742065, 0.9716366529464722, 0.30257928371429443]} - {"index": {}} + {"index": {"_id": "2"}} {"source_text": "these are my words", "embedding":[0.7000167369842529, 0.590781033039093, 0.009879708290100098, 0.7874260544776917, 0.797156572341919, 0.1791083812713623, 0.07826781272888184, 0.25102007389068604, 0.09334254264831543, 0.3819708824157715, 0.7312374711036682, 0.02819347381591797, 0.20099765062332153, 0.7702597975730896, 0.9443559050559998, 0.35520339012145996, 0.25699591636657715, 0.5596823692321777, 0.23947590589523315, 0.47478222846984863, 0.23411548137664795, 0.9809996485710144, 0.3806597590446472, 0.5006771087646484, 0.5724453926086426, 0.21510547399520874, 0.07062828540802002, 0.9858258962631226, 0.9636645317077637, 0.36034029722213745, 0.07260054349899292, 0.06882566213607788, 0.18354403972625732, 0.06756395101547241, 0.5749042630195618, 0.05275309085845947, 0.1865217685699463, 0.5852730870246887, 0.1086682677268982, 0.10090464353561401, 0.32582908868789673, 0.5494027733802795, 0.873362123966217, 0.02236837148666382, 0.37973177433013916, 0.5556552410125732, 0.5083678364753723, 0.8081125020980835, 0.09164196252822876, 0.2207810878753662, 0.8086426258087158, 0.271828293800354, 0.5981417298316956, 0.7745779156684875, 0.40872830152511597, 0.6035888195037842, 0.5598325133323669, 0.19086670875549316, 0.02406853437423706, 0.8299782872200012, 0.4994274377822876, 0.0300295352935791, 0.47190529108047485, 0.8889331817626953, 0.34195321798324585, 0.9380808472633362, 0.4418332576751709, 0.5789303779602051, 0.0526617169380188, 0.7349719405174255, 0.44571834802627563, 0.6602563261985779, 0.3819742202758789, 0.16881734132766724, 0.45588219165802, 0.028081774711608887, 0.6681976914405823, 0.8183007836341858, 0.7887755632400513, 0.4506028890609741, 0.8040162324905396, 0.431918203830719, 0.7408918738365173, 0.39756304025650024, 0.7438145875930786, 0.6120601892471313, 0.5724676251411438, 0.08701330423355103, 0.18344634771347046, 0.7226220369338989, 0.3648560643196106, 0.9813777208328247, 0.2615315318107605, 0.9847549796104431, 0.32967478036880493, 0.47099196910858154, 0.3591546416282654, 0.4132147431373596, 0.48631107807159424, 0.04420149326324463]} - {"index": {}} + {"index": {"_id": "3"}} {"source_text": "washing machine", "embedding":[0.7044712901115417, 0.12284207344055176, 0.5008929967880249, 0.04643195867538452, 0.3666788339614868, 0.26660799980163574, 0.24114298820495605, 0.0761682391166687, 0.5294214487075806, 0.16935181617736816, 0.6257967948913574, 0.2804388999938965, 0.6417903900146484, 0.169958233833313, 0.4216839075088501, 0.6773303747177124, 0.9472144842147827, 0.21874648332595825, 0.5095921754837036, 0.839306116104126, 0.6176233291625977, 0.5847064852714539, 0.6748610734939575, 0.3264034390449524, 0.4112023115158081, 0.13818275928497314, 0.08356589078903198, 0.4147903323173523, 0.5626787543296814, 0.7167286276817322, 0.6314535737037659, 
0.23092854022979736, 0.34547603130340576, 0.7425565719604492, 0.2837678790092468, 0.47037917375564575, 0.1555209755897522, 0.5618507266044617, 0.2076261043548584, 0.3026384711265564, 0.04561811685562134, 0.1691250205039978, 0.2504339814186096, 0.5350574851036072, 0.26857447624206543, 0.23607933521270752, 0.16938960552215576, 0.23708534240722656, 0.026302993297576904, 0.16901731491088867, 0.2847784757614136, 0.944273829460144, 0.28171658515930176, 0.9864799380302429, 0.6811433434486389, 0.9383156895637512, 0.5682582259178162, 0.14361613988876343, 0.7900274395942688, 0.27808505296707153, 0.05677521228790283, 0.08594226837158203, 0.6450491547584534, 0.06500720977783203, 0.36045730113983154, 0.1987738013267517, 0.07287931442260742, 0.5315744280815125, 0.04742676019668579, 0.7842378616333008, 0.0881078839302063, 0.7612627744674683, 0.2528950572013855, 0.27305954694747925, 0.03027820587158203, 0.4686838984489441, 0.13311690092086792, 0.048372089862823486, 0.808062732219696, 0.44010263681411743, 0.5726178288459778, 0.15828031301498413, 0.4597446322441101, 0.6375324130058289, 0.8452948927879333, 0.9763500690460205, 0.5094607472419739, 0.3535742163658142, 0.664739191532135, 0.40749913454055786, 0.8537857532501221, 0.5830079913139343, 0.7949922680854797, 0.6309236288070679, 0.07258343696594238, 0.1224660873413086, 0.24250483512878418, 0.36189037561416626, 0.5156043171882629, 0.1819135546684265]} - {"index": {}} + {"index": {"_id": "4"}} {"source_text": "washing machine smells", "embedding":[0.7249823808670044, 0.3981819152832031, 0.4572623372077942, 0.7442894577980042, 0.15898281335830688, 0.6481881737709045, 0.1513708233833313, 0.8945682644844055, 0.7708938121795654, 0.5494217276573181, 0.48253726959228516, 0.39402270317077637, 0.6369197368621826, 0.7152248024940491, 0.6326345205307007, 0.7362181544303894, 0.350342333316803, 0.16101288795471191, 0.4180338382720947, 0.04114532470703125, 0.002633512020111084, 0.20396709442138672, 0.8963556885719299, 0.1552276611328125, 0.7476853728294373, 0.9651047587394714, 0.7527561187744141, 0.7041972279548645, 0.12461084127426147, 0.6282403469085693, 0.9631509184837341, 0.16590750217437744, 0.4101366400718689, 0.31320667266845703, 0.13579899072647095, 0.2895740270614624, 0.9905323386192322, 0.02118372917175293, 0.637545645236969, 0.5133231282234192, 0.679695188999176, 0.04641437530517578, 0.21913814544677734, 0.16534924507141113, 0.02987360954284668, 0.14805591106414795, 0.16874665021896362, 0.9378783702850342, 0.8607399463653564, 0.7287217974662781, 0.5402306318283081, 0.9973209500312805, 0.26169413328170776, 0.3835873603820801, 0.1874808669090271, 0.8038567304611206, 0.18557673692703247, 0.8631893992424011, 0.7676172256469727, 0.3599127531051636, 0.48698097467422485, 0.926689088344574, 0.6542723774909973, 0.49722349643707275, 0.7027173638343811, 0.13385021686553955, 0.9873734712600708, 0.17187494039535522, 0.7995050549507141, 0.5259199142456055, 0.33804380893707275, 0.21665722131729126, 0.952264130115509, 0.8337767720222473, 0.879487156867981, 0.5553549528121948, 0.6160674095153809, 0.1315295696258545, 0.8010737895965576, 0.834412693977356, 0.20340144634246826, 0.8993185758590698, 0.6493895649909973, 0.9454924464225769, 0.38529330492019653, 0.6891772150993347, 0.5530646443367004, 0.18555349111557007, 0.8361382484436035, 0.11815804243087769, 0.38942235708236694, 0.945141613483429, 0.6417409181594849, 0.39776402711868286, 0.5133314728736877, 0.5431299805641174, 0.2615429759025574, 0.8987119793891907, 0.023733675479888916, 0.4941052794456482]} - {"index": {}} + 
{"index": {"_id": "5"}} {"source_text": "my words", "embedding":[0.19087255001068115, 0.5498749017715454, 0.9536173939704895, 0.25011056661605835, 0.37642204761505127, 0.18271470069885254, 0.670674741268158, 0.5553990006446838, 0.3306507468223572, 0.3368762731552124, 0.053364574909210205, 0.047215282917022705, 0.4221981167793274, 0.7591024041175842, 0.998794436454773, 0.6113318204879761, 0.8178470730781555, 0.8554672598838806, 0.40100908279418945, 0.6486459374427795, 0.804382860660553, 0.6775466799736023, 0.2916865944862366, 0.7019925117492676, 0.9812073707580566, 0.4414554834365845, 0.08203905820846558, 0.9167835116386414, 0.3082762360572815, 0.5454868674278259, 0.6665160655975342, 0.06828844547271729, 0.36014634370803833, 0.01810687780380249, 0.2640475630760193, 0.1856365203857422, 0.4734996557235718, 0.8153479695320129, 0.9614933133125305, 0.4851576089859009, 0.003343045711517334, 0.17352384328842163, 0.26423048973083496, 0.24217921495437622, 0.5694647431373596, 0.8538861274719238, 0.06464511156082153, 0.038984060287475586, 0.7695011496543884, 0.008188009262084961, 0.3858819007873535, 0.7950196862220764, 0.7225212454795837, 0.3982154130935669, 0.4996080994606018, 0.28709208965301514, 0.6753579378128052, 0.6779837608337402, 0.4815831184387207, 0.27917128801345825, 0.8400004506111145, 0.9022405743598938, 0.8253144025802612, 0.6251398324966431, 0.25444501638412476, 0.7694959044456482, 0.006821691989898682, 0.7958594560623169, 0.9144708514213562, 0.8688076138496399, 0.9641174077987671, 0.44437146186828613, 0.06135892868041992, 0.2638128399848938, 0.05436718463897705, 0.9926314353942871, 0.8661795854568481, 0.9176243543624878, 0.5521496534347534, 0.6017677783966064, 0.22096896171569824, 0.7030748128890991, 0.16923701763153076, 0.8178754448890686, 0.47008246183395386, 0.28875309228897095, 0.14314061403274536, 0.3431167006492615, 0.9301973581314087, 0.5416158437728882, 0.563427209854126, 0.7897542119026184, 0.2761036157608032, 0.16855067014694214, 0.42684781551361084, 0.7562968730926514, 0.2551668882369995, 0.7754542827606201, 0.218039870262146, 0.7080662846565247]} - do: headers: @@ -112,15 +112,15 @@ setup: search: index: index-with-embedded-text body: + size: 3 query: knn: field: embedding - num_candidates: 3 query_vector_builder: text_embedding: model_id: text_embedding_model model_text: "the octopus comforter smells" - - match: { hits.total.value: 3 } + - length: { hits.hits: 3 } --- "nested kNN search with inner_hits size": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml index 1df34a64f860a..a2cfb65b08a11 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml @@ -76,21 +76,7 @@ "Test job config is invalid because model snapshot id set": - do: - catch: /illegal_argument_exception/ - ml.validate: - body: > - { - "model_snapshot_id": "wont-create-with-this-setting", - "analysis_config" : { - "bucket_span": "1h", - "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] - }, - "data_description" : { - } - } - - - do: - catch: /The job is configured with fields \[model_snapshot_id\] that are illegal to set at job creation/ + catch: /x_content_parse_exception/ ml.validate: body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml index 319b84e855aaf..7e65691ea17dd 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 55 } + - length: { "cluster" : 57 } - length: { "index" : 22 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 367655ba89388..cc282d26ae418 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -116,7 +116,7 @@ setup: - {"create": {"_index": "profiling-executables", "_id": "lHp5_WAgpLy2alrUVab6HA"}} - {"@timestamp": "1698624000", "Executable": {"build": {"id": "c5f89ea1c68710d2a493bb604c343a92c4f8ddeb"}, "file": {"name": "vmlinux"}}, "Symbolization": {"next_time": "4852491791"}, "ecs": {"version": "1.12.0"}} - {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} - - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "profiling.host.machine": "x86_64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } 
+ - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "host.type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "cloud.provider": "aws", "cloud.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } - {"index": {"_index": "test-events"}} - {"@timestamp": "1700504427", "events": ["S07KmaoGhvNte78xwwRbZQ"]} --- diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index ff812a6e2cb9c..edc79a8ebfc9e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -52,6 +52,8 @@ teardown: - match: { role: { created: true } } - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" security.put_role: name: "backwards_role" body: > @@ -69,10 +71,11 @@ teardown: - do: headers: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - cluster.health: {} - - match: { timed_out: false } + info: {} - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" security.get_role: name: "admin_role" - match: { admin_role.cluster.0: "all" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml index 21badf967b7aa..6ee155eb9aa71 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml @@ -72,8 +72,7 @@ teardown: - do: headers: 
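These role tests authenticate each request as the test user with the same Basic credential, and they swap the cluster.health call for the root info endpoint. For reference, the header value is plain HTTP Basic auth, that is, base64 of user:password; a minimal Java sketch (the credential string is what the header used in these tests decodes to):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

final class BasicAuthHeaderSketch {
    public static void main(String[] args) {
        String credentials = "joe:s3krit-password"; // the test user these YAML tests run as
        String headerValue = "Basic " + Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
        System.out.println(headerValue); // prints: Basic am9lOnMza3JpdC1wYXNzd29yZA==
    }
}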
Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - cluster.health: {} - - match: { timed_out: false } + info: {} - do: security.get_role: diff --git a/x-pack/plugin/stack/build.gradle b/x-pack/plugin/stack/build.gradle index 6b26373c40544..c4b950ad9cb59 100644 --- a/x-pack/plugin/stack/build.gradle +++ b/x-pack/plugin/stack/build.gradle @@ -16,6 +16,7 @@ base { dependencies { compileOnly project(path: xpackModule('core')) + testImplementation project(':modules:data-streams') javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: ':x-pack:plugin:stack') clusterModules project(':modules:mapper-extras') diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 1eaf224083c87..b21e8c0c15811 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -38,9 +38,13 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); - // Current version of the registry requires all nodes to be at least 8.9.0. + // Historical node feature kept here as LegacyStackTemplateRegistry is deprecated public static final NodeFeature STACK_TEMPLATES_FEATURE = new NodeFeature("stack.templates_supported"); + // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a + // dependency to the data-streams module just for this + public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + // The stack template registry version. This number must be incremented when we make changes // to built-in templates. public static final int REGISTRY_VERSION = 8; @@ -326,9 +330,8 @@ protected boolean requiresMasterNode() { @Override protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only once all nodes are updated to 8.9.0. 
- // This is necessary to prevent an error caused by the usage of the ignore_missing_pipeline property - // in the pipeline processor, which has been introduced only in 8.9.0 - return featureService.clusterHasFeature(event.state(), STACK_TEMPLATES_FEATURE); + // Ensure the current versions of the components are installed only once all nodes support data stream lifecycle + // due to .kibana-reporting making use of the feature + return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); } } diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 2caa820c51645..782fe3b41ae3b 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.DataStreamFeatures; import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; @@ -70,6 +71,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -87,7 +89,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); clusterService = ClusterServiceUtils.createClusterService(threadPool); - featureService = new FeatureService(List.of(new StackTemplatesFeatures())); + featureService = new FeatureService(List.of(new StackTemplatesFeatures(), new DataStreamFeatures())); registry = new StackTemplateRegistry( Settings.EMPTY, clusterService, @@ -504,7 +506,7 @@ public void testThatMissingMasterNodeDoesNothing() { public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_8_0); + DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); DiscoveryNodes nodes = DiscoveryNodes.builder() .localNodeId("updatedNode") .masterNodeId("updatedNode") @@ -513,7 +515,7 @@ public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { .build(); client.setVerifier((a, r, l) -> { - fail("if some cluster nodes are not updated to at least v.8.9.0 nothing should happen"); + fail("if some cluster nodes are not updated to at least v.8.11.0 nothing should happen"); return null; }); @@ -538,6 +540,11 @@ public void testThatTemplatesAreNotDeprecated() { .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } + public void testDataStreamLifecycleNodeFeatureId() { + // let's make sure these ids remain in sync + assertThat(StackTemplateRegistry.DATA_STREAM_LIFECYCLE.id(), is(DataStreamFeatures.DATA_STREAM_LIFECYCLE.id())); + } + // ------------- /** diff --git 
a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java index 105633c7340e5..d3d86571e002f 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java @@ -420,12 +420,15 @@ public void testTransformPermissionsDeferUnattendedNoDest() throws Exception { startTransform(transformId, RequestOptions.DEFAULT); - String destIndexIssue = Strings.format("Could not create destination index [%s] for transform [%s]", destIndexName, transformId); + var permissionIssues = Strings.format( + "org.elasticsearch.ElasticsearchSecurityException: Cannot start transform [%s] because user lacks required permissions, " + + "see privileges_check_failed issue for more details", + transformId + ); // transform's auth state status is still RED due to: // - lacking permissions - // - and the inability to create destination index in the indexer (which is also a consequence of lacking permissions) - // wait for 10 seconds to give the transform indexer enough time to try creating destination index - assertBusy(() -> { assertRed(transformId, authIssue, destIndexIssue); }); + // - and the inability to start the indexer (which is also a consequence of lacking permissions) + assertBusy(() -> { assertRed(transformId, authIssue, permissionIssues); }); // update transform's credentials so that the transform has permission to access source/dest indices updateConfig(transformId, "{}", RequestOptions.DEFAULT.toBuilder().addHeader(AUTH_KEY, Users.SENIOR.header).build()); @@ -440,7 +443,6 @@ public void testTransformPermissionsDeferUnattendedNoDest() throws Exception { * unattended = true * pre-existing dest index = true */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105794") public void testTransformPermissionsDeferUnattendedDest() throws Exception { String transformId = "transform-permissions-defer-unattended-dest-exists"; String sourceIndexName = transformId + "-index"; @@ -467,8 +469,15 @@ public void testTransformPermissionsDeferUnattendedDest() throws Exception { startTransform(config.getId(), RequestOptions.DEFAULT); - // transform's auth state status is still RED, but the health status is GREEN (because dest index exists) - assertRed(transformId, authIssue); + var permissionIssues = Strings.format( + "org.elasticsearch.ElasticsearchSecurityException: Cannot start transform [%s] because user lacks required permissions, " + + "see privileges_check_failed issue for more details", + transformId + ); + // transform's auth state status is still RED due to: + // - lacking permissions + // - and the inability to start the indexer (which is also a consequence of lacking permissions) + assertBusy(() -> { assertRed(transformId, authIssue, permissionIssues); }); // update transform's credentials so that the transform has permission to access source/dest indices updateConfig(transformId, "{}", RequestOptions.DEFAULT.toBuilder().addHeader(AUTH_KEY, Users.SENIOR.header).build()); diff --git 
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java index dd7ad718812a1..7ed6466357e8f 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java @@ -32,6 +32,13 @@ public class TransformResetIT extends TransformRestTestCase { TEST_PASSWORD_SECURE_STRING ); private static final String DATA_ACCESS_ROLE = "test_data_access"; + private static final String SYNC_CONFIG = """ + "sync": { + "time": { + "field": "timestamp" + } + }, + """; private static boolean indicesCreated = false; @@ -132,6 +139,7 @@ public void testResetDeletesDestinationIndex() throws Exception { } private static String createConfig(String transformDestIndex) { + boolean isContinuous = randomBoolean(); return Strings.format(""" { "dest": { @@ -140,6 +148,7 @@ private static String createConfig(String transformDestIndex) { "source": { "index": "%s" }, + %s "pivot": { "group_by": { "reviewer": { @@ -156,6 +165,6 @@ private static String createConfig(String transformDestIndex) { } } } - }""", transformDestIndex, REVIEWS_INDEX_NAME); + }""", transformDestIndex, REVIEWS_INDEX_NAME, isContinuous ? SYNC_CONFIG : ""); } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index ce1178e760a6c..7c74e918a039f 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -31,6 +31,7 @@ import java.io.IOException; import java.time.Instant; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -734,4 +735,26 @@ private void logAudits() throws Exception { } }, 5, TimeUnit.SECONDS); } + + @SuppressWarnings("unchecked") + protected List getTransformTasks() throws IOException { + final Request tasksRequest = new Request("GET", "/_tasks"); + tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); + Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); + + Map nodes = (Map) tasksResponse.get("nodes"); + if (nodes == null) { + return List.of(); + } + + List foundTasks = new ArrayList<>(); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map tasks = (Map) nodeInfo.get("tasks"); + if (tasks != null) { + foundTasks.addAll(tasks.keySet()); + } + } + return foundTasks; + } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 105ac09e356fd..e537a6f280ac0 100644 --- 
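The getTransformTasks helper is hoisted from TransformRobustnessIT into the shared TransformRestTestCase above; its generic types read awkwardly in the flattened diff, so here is the same helper sketched with the type parameters spelled out (assuming the usual List<String> / Map<String, Object> shapes):

// Lives in the shared TransformRestTestCase; asks the tasks API for anything spawned by the transform persistent task.
@SuppressWarnings("unchecked")
protected List<String> getTransformTasks() throws IOException {
    final Request tasksRequest = new Request("GET", "/_tasks");
    tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*");
    Map<String, Object> tasksResponse = entityAsMap(client().performRequest(tasksRequest));

    Map<String, Object> nodes = (Map<String, Object>) tasksResponse.get("nodes");
    if (nodes == null) {
        return List.of();
    }

    List<String> foundTasks = new ArrayList<>();
    for (Map.Entry<String, Object> node : nodes.entrySet()) {
        Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue();
        Map<String, Object> tasks = (Map<String, Object>) nodeInfo.get("tasks");
        if (tasks != null) {
            foundTasks.addAll(tasks.keySet());
        }
    }
    return foundTasks;
}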
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -15,10 +15,8 @@ import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.containsString; @@ -150,28 +148,6 @@ public void testCancellingTransformTask() throws Exception { assertThat(getTransformTasks(), is(empty())); } - @SuppressWarnings("unchecked") - private List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } - private void beEvilAndDeleteTheTransformIndex() throws IOException { final Request deleteRequest = new Request("DELETE", TransformInternalIndexConstants.LATEST_INDEX_NAME); deleteRequest.setOptions( diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index 9f74d445252d2..1abf611e833c4 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -24,10 +24,12 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.matchesRegex; +import static org.hamcrest.Matchers.nullValue; public class TransformTaskFailedStateIT extends TransformRestTestCase { @@ -61,6 +63,9 @@ public void testForceStopFailedTransform() throws Exception { String transformIndex = "failure_pivot_reviews"; createDestinationIndexWithBadMapping(transformIndex); createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); @@ -72,6 +77,8 @@ public void testForceStopFailedTransform() throws Exception { // Verify we have failed for the expected reason assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat(getTransformTasks(), 
hasSize(1)); + // verify that we cannot stop a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> stopTransform(transformId, false)); assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); @@ -90,6 +97,44 @@ public void testForceStopFailedTransform() throws Exception { awaitState(transformId, TransformStats.State.STOPPED); fullState = getTransformStateAndStats(transformId); assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); + + assertThat(getTransformTasks(), is(empty())); + } + + public void testForceResetFailedTransform() throws Exception { + String transformId = "test-force-reset-failed-transform"; + createReviewsIndex(REVIEWS_INDEX_NAME, 10, 27, "date", false, -1, null); + String transformIndex = "failure_pivot_reviews"; + createDestinationIndexWithBadMapping(transformIndex); + createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + + startTransform(transformId); + awaitState(transformId, TransformStats.State.FAILED); + Map fullState = getTransformStateAndStats(transformId); + final String failureReason = "Failed to index documents into destination index due to permanent error: " + + "\\[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced \\[7\\] " + + "failures and at least 1 irrecoverable " + + "\\[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " + + "incompatible with the transform configuration.;.*"; + // Verify we have failed for the expected reason + assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + + assertThat(getTransformTasks(), hasSize(1)); + + // verify that we cannot reset a failed transform + ResponseException ex = expectThrows(ResponseException.class, () -> resetTransform(transformId, false)); + assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); + assertThat( + (String) XContentMapValues.extractValue("error.reason", entityAsMap(ex.getResponse())), + is(equalTo("Cannot reset transform [test-force-reset-failed-transform] as the task is running. 
Stop the task first")) + ); + + // Verify that we can force reset a failed transform + resetTransform(transformId, true); + + assertThat(getTransformTasks(), is(empty())); } public void testStartFailedTransform() throws Exception { @@ -98,6 +143,9 @@ public void testStartFailedTransform() throws Exception { String transformIndex = "failure_pivot_reviews"; createDestinationIndexWithBadMapping(transformIndex); createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); @@ -109,6 +157,8 @@ public void testStartFailedTransform() throws Exception { // Verify we have failed for the expected reason assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat(getTransformTasks(), hasSize(1)); + final String expectedFailure = "Unable to start transform \\[test-force-start-failed-transform\\] " + "as it is in a failed state with failure: \\[" + failureReason @@ -124,6 +174,8 @@ public void testStartFailedTransform() throws Exception { }, 60, TimeUnit.SECONDS); stopTransform(transformId, true); + + assertThat(getTransformTasks(), is(empty())); } private void awaitState(String transformId, TransformStats.State state) throws Exception { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index d96ba88faff9a..51379b81d7e9d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -95,7 +95,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // <3> Delete transform config ActionListener deleteDestIndexListener = ActionListener.wrap( unusedAcknowledgedResponse -> transformConfigManager.deleteTransform(request.getId(), ActionListener.wrap(r -> { - logger.debug("[{}] deleted transform", request.getId()); + logger.info("[{}] deleted transform", request.getId()); auditor.info(request.getId(), "Deleted transform."); listener.onResponse(AcknowledgedResponse.of(r)); }, listener::onFailure)), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 8a82880f4d9a3..df36a850a3b0a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -168,7 +168,7 @@ private void putTransform(Request request, ActionListener var config = request.getConfig(); transformConfigManager.putTransformConfiguration(config, listener.delegateFailureAndWrap((l, unused) -> { var transformId = config.getId(); - logger.debug("[{}] created transform", transformId); + logger.info("[{}] created transform", transformId); auditor.info(transformId, "Created transform."); var validationFunc = FunctionFactory.create(config); diff --git 
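The new testForceResetFailedTransform above relies on the behaviour changed in the TransportResetTransformAction hunk that follows: the reset request's force flag is now propagated to the internal stop request, so a failed (still running) task can be reset with force=true. At the REST layer this corresponds roughly to the helper below, written in the style of these ITs (the helper name is illustrative; resetTransform(id, force) in the tests presumably does the equivalent):

// Illustrative helper in the TransformRestTestCase style.
protected void forceResetTransform(String transformId) throws IOException {
    Request reset = new Request("POST", "/_transform/" + transformId + "/_reset");
    // Without force=true a failed transform whose task still exists is rejected with 409 CONFLICT:
    // "Cannot reset transform [...] as the task is running. Stop the task first"
    reset.addParameter("force", "true");
    assertOK(client().performRequest(reset));
}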
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index ee394c7a128b4..6d0e3213d67fc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -110,7 +110,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // <4> Reset transform ActionListener updateTransformListener = ActionListener.wrap( unusedUpdateResult -> transformConfigManager.resetTransform(request.getId(), ActionListener.wrap(resetResponse -> { - logger.debug("[{}] reset transform", request.getId()); + logger.info("[{}] reset transform", request.getId()); auditor.info(request.getId(), "Reset transform."); listener.onResponse(AcknowledgedResponse.of(resetResponse)); }, listener::onFailure)), @@ -154,7 +154,14 @@ protected void masterOperation(Task task, Request request, ClusterState state, A stopTransformActionListener.onResponse(null); return; } - StopTransformAction.Request stopTransformRequest = new StopTransformAction.Request(request.getId(), true, false, null, true, false); + StopTransformAction.Request stopTransformRequest = new StopTransformAction.Request( + request.getId(), + true, + request.isForce(), + null, + true, + false + ); executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, StopTransformAction.INSTANCE, stopTransformRequest, stopTransformActionListener); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index b35566c6467c4..3fb271aeb1535 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -154,7 +154,7 @@ protected void doExecute(Task task, Request request, ActionListener li TransformConfig updatedConfig = updateResult.getConfig(); AuthorizationState authState = updateResult.getAuthState(); auditor.info(updatedConfig.getId(), "Updated transform."); - logger.debug("[{}] Updated transform [{}]", updatedConfig.getId(), updateResult.getStatus()); + logger.info("[{}] Updated transform [{}]", updatedConfig.getId(), updateResult.getStatus()); checkTransformConfigAndLogWarnings(updatedConfig); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java index 8dd7b541b4e28..592b7b423c053 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java @@ -199,7 +199,7 @@ private void recursiveUpdate( updateOneTransform(next, dryRun, timeout, ActionListener.wrap(updateResponse -> { if (UpdateResult.Status.DELETED.equals(updateResponse.getStatus()) == false) { auditor.info(next, "Updated transform."); - logger.debug("[{}] Updated transform [{}]", next, 
updateResponse.getStatus()); + logger.info("[{}] Updated transform [{}]", next, updateResponse.getStatus()); updatesByStatus.compute(updateResponse.getStatus(), (k, v) -> (v == null) ? 1 : v + 1L); } if (transformsToUpgrade.isEmpty() == false) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java index 7fdabda6189a9..6119f446e8dc4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java @@ -39,6 +39,9 @@ public interface Listener { private final AtomicInteger statePersistenceFailureCount = new AtomicInteger(); private final AtomicReference lastStatePersistenceFailure = new AtomicReference<>(); private volatile Instant lastStatePersistenceFailureStartTime; + private final AtomicInteger startUpFailureCount = new AtomicInteger(); + private final AtomicReference lastStartUpFailure = new AtomicReference<>(); + private volatile Instant startUpFailureTime; private volatile Instant changesLastDetectedAt; private volatile Instant lastSearchTime; private volatile boolean shouldStopAtCheckpoint = false; @@ -214,6 +217,37 @@ Instant getLastStatePersistenceFailureStartTime() { return lastStatePersistenceFailureStartTime; } + void resetStartUpFailureCount() { + startUpFailureCount.set(0); + lastStartUpFailure.set(null); + startUpFailureTime = null; + } + + int getStartUpFailureCount() { + return startUpFailureCount.get(); + } + + Throwable getStartUpFailure() { + return lastStartUpFailure.get(); + } + + int incrementAndGetStartUpFailureCount(Throwable failure) { + lastStartUpFailure.set(failure); + int newFailureCount = startUpFailureCount.incrementAndGet(); + if (newFailureCount == 1) { + startUpFailureTime = Instant.now(); + } + return newFailureCount; + } + + Instant getStartUpFailureTime() { + return startUpFailureTime; + } + + boolean doesNotHaveFailures() { + return getFailureCount() == 0 && getStatePersistenceFailureCount() == 0 && getStartUpFailureCount() == 0; + } + void shutdown() { taskListener.shutdown(); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java index 86d8ce4a6173c..24c5d45a38f75 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java @@ -38,7 +38,8 @@ public enum IssueType { PRIVILEGES_CHECK_FAILED("Privileges check failed"), TRANSFORM_TASK_FAILED("Transform task state is [failed]"), TRANSFORM_INDEXER_FAILED("Transform indexer failed"), - TRANSFORM_INTERNAL_STATE_UPDATE_FAILED("Task encountered failures updating internal state"); + TRANSFORM_INTERNAL_STATE_UPDATE_FAILED("Task encountered failures updating internal state"), + TRANSFORM_STARTUP_FAILED("Transform task is automatically retrying its startup process"); private final String issue; @@ -88,8 +89,7 @@ public static TransformHealth checkTransform(TransformTask transformTask) { public static TransformHealth checkTransform(TransformTask transformTask, @Nullable AuthorizationState authState) { // quick check if 
(TransformTaskState.FAILED.equals(transformTask.getState().getTaskState()) == false - && transformTask.getContext().getFailureCount() == 0 - && transformTask.getContext().getStatePersistenceFailureCount() == 0 + && transformTask.getContext().doesNotHaveFailures() && AuthorizationState.isNullOrGreen(authState)) { return TransformHealth.GREEN; } @@ -145,6 +145,24 @@ public static TransformHealth checkTransform(TransformTask transformTask, @Nulla ); } + if (transformContext.getStartUpFailureCount() != 0) { + if (HealthStatus.RED.equals(maxStatus) == false) { + maxStatus = HealthStatus.YELLOW; + } + + var lastFailure = transformContext.getStartUpFailure(); + var lastFailureMessage = lastFailure instanceof ElasticsearchException elasticsearchException + ? elasticsearchException.getDetailedMessage() + : lastFailure.getMessage(); + issues.add( + IssueType.TRANSFORM_STARTUP_FAILED.newIssue( + lastFailureMessage, + transformContext.getStartUpFailureCount(), + transformContext.getStartUpFailureTime() + ) + ); + } + return new TransformHealth(maxStatus, issues); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 38bd231e3e76a..636ed3cc02706 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; @@ -21,6 +22,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.health.HealthStatus; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -271,22 +273,25 @@ protected void onStart(long now, ActionListener listener) { return; } - SetOnce> deducedDestIndexMappings = new SetOnce<>(); - - ActionListener finalListener = ActionListener.wrap(r -> { - try { - // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory - if (context.getPageSize() == 0) { - configurePageSize(getConfig().getSettings().getMaxPageSearchSize()); - } + if (context.getAuthState() != null && HealthStatus.RED.equals(context.getAuthState().getStatus())) { + // AuthorizationState status is RED which means there was permission check error during PUT or _update. 
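The comment above marks the new fail-fast path in TransformIndexer.onStart: when the stored authorization state is RED, the indexer refuses to start and reports the same message that TransformInsufficientPermissionsIT asserts on earlier in this diff. A condensed sketch of the idiom, with illustrative names (the surrounding refactor also switches the happy path to ActionListener.delegateFailureAndWrap, which forwards failures automatically):

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.health.HealthStatus;
import org.elasticsearch.xpack.core.transform.transforms.AuthorizationState;

final class OnStartGuardSketch {
    static void onStart(AuthorizationState authState, ActionListener<Boolean> startListener, String transformId) {
        if (authState != null && HealthStatus.RED.equals(authState.getStatus())) {
            // Permission check failed during PUT or _update; do not even attempt to run the indexer.
            startListener.onFailure(
                new ElasticsearchSecurityException(
                    "Cannot start transform ["
                        + transformId
                        + "] because user lacks required permissions, see privileges_check_failed issue for more details"
                )
            );
            return;
        }
        // Happy path: delegateFailureAndWrap routes exceptions thrown in the lambda to startListener.onFailure.
        ActionListener<Boolean> finalListener = startListener.delegateFailureAndWrap((l, r) -> l.onResponse(r));
        finalListener.onResponse(true);
    }
}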
+ listener.onFailure( + new ElasticsearchSecurityException( + TransformMessages.getMessage(TransformMessages.TRANSFORM_CANNOT_START_WITHOUT_PERMISSIONS, getConfig().getId()) + ) + ); + return; + } - runState = determineRunStateAtStart(); - listener.onResponse(true); - } catch (Exception e) { - listener.onFailure(e); - return; + ActionListener finalListener = listener.delegateFailureAndWrap((l, r) -> { + // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory + if (context.getPageSize() == 0) { + configurePageSize(getConfig().getSettings().getMaxPageSearchSize()); } - }, listener::onFailure); + + runState = determineRunStateAtStart(); + l.onResponse(true); + }); // On each run, we need to get the total number of docs and reset the count of processed docs // Since multiple checkpoints can be executed in the task while it is running on the same node, we need to gather @@ -334,6 +339,7 @@ protected void onStart(long now, ActionListener listener) { } }, listener::onFailure); + var deducedDestIndexMappings = new SetOnce>(); var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index ae9678893df9a..f18414e3aaead 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformExtension; @@ -203,6 +204,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa final SetOnce stateHolder = new SetOnce<>(); + // <7> log the start result ActionListener startTaskListener = ActionListener.wrap( response -> logger.info("[{}] successfully completed and scheduled task in node operation", transformId), failure -> { @@ -348,21 +350,18 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa }); // <2> Get the transform config - ActionListener templateCheckListener = ActionListener.wrap( - aVoid -> transformServices.getConfigManager().getTransformConfiguration(transformId, getTransformConfigListener), - error -> { - Throwable cause = ExceptionsHelper.unwrapCause(error); - String msg = "Failed to create internal index mappings"; - markAsFailed(buildTask, error, msg + "[" + cause + "]"); - } - ); + var templateCheckListener = getTransformConfig(buildTask, params, getTransformConfigListener); // <1> Check the latest internal index (IMPORTANT: according to _this_ node, which might be newer than master) is installed TransformInternalIndex.createLatestVersionedIndexIfRequired( clusterService, parentTaskClient, 
transformExtension.getTransformInternalIndexAdditionalSettings(), - templateCheckListener + templateCheckListener.delegateResponse((l, e) -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + String msg = "Failed to create internal index mappings"; + markAsFailed(buildTask, e, msg + "[" + cause + "]"); + }) ); } @@ -401,6 +400,64 @@ private static void markAsFailed(TransformTask task, Throwable exception, String } } + private ActionListener getTransformConfig( + TransformTask task, + TransformTaskParams params, + ActionListener listener + ) { + return ActionListener.running(() -> { + var transformId = params.getId(); + // if this call fails for the first time, we are going to retry it indefinitely + // register the retry using the TransformScheduler, when the call eventually succeeds, deregister it before returning + var scheduler = transformServices.getScheduler(); + scheduler.registerTransform( + params, + new TransformRetryableStartUpListener<>( + transformId, + l -> transformServices.getConfigManager().getTransformConfiguration(transformId, l), + ActionListener.runBefore(listener, () -> scheduler.deregisterTransform(transformId)), + retryListener(task), + () -> true, // because we can't determine if this is an unattended transform yet, retry indefinitely + task.getContext() + ) + ); + }); + } + + /** + * This listener is always called after the first execution of a {@link TransformRetryableStartUpListener}. + * + * When the result is true, then the first call has failed and will retry. Save the state as Started and unblock the network thread, + * notifying the user with a 200 OK (acknowledged). + * + * When the result is false, then the first call has succeeded, and no further action is required for this listener. + */ + private ActionListener retryListener(TransformTask task) { + return ActionListener.wrap(isRetrying -> { + if (isRetrying) { + var oldState = task.getState(); + var newState = new TransformState( + TransformTaskState.STARTED, + oldState.getIndexerState(), + oldState.getPosition(), + oldState.getCheckpoint(), + "Retrying transform start.", + oldState.getProgress(), + oldState.getNode(), + oldState.shouldStopAtNextCheckpoint(), + oldState.getAuthState() + ); + task.persistStateToClusterState( + newState, + ActionListener.wrap( + rr -> logger.debug("[{}] marked as retrying in TransformState.", task.getTransformId()), + ee -> logger.atWarn().withThrowable(ee).log("[{}] failed to persist state.", task.getTransformId()) + ) + ); + } + }, e -> markAsFailed(task, e, "Failed to initiate retries for Transform.")); + } + private void startTask( TransformTask buildTask, ClientTransformIndexerBuilder indexerBuilder, diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java new file mode 100644 index 0000000000000..17548fd8d427f --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.transforms; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import java.util.function.Supplier; + +class TransformRetryableStartUpListener implements TransformScheduler.Listener { + private final String transformId; + private final Consumer> action; + private final ActionListener actionListener; + private final ActionListener retryScheduledListener; + private final Supplier shouldRetry; + private final TransformContext context; + private final AtomicBoolean isFirstRun; + private final AtomicBoolean isRunning; + + /** + * @param transformId the transform associated with this listener. All events to this listener must be for the same transformId. + * @param action the action this listener will take. When the TransformScheduler invokes {@link #triggered(TransformScheduler.Event)}, + * the call is forwarded to this action. + * @param actionListener actionListener will be notified via #onResponse when the action succeeds or via #onFailure when retries have + * stopped. If the Transform Stop API deregisters this class from the Scheduler, this actionListener will *not* be + * invoked. + * @param retryScheduledListener retryScheduledListener will be notified after the first call. If true, another thread has started the + * retry process. If false, the original call was successful, and no retries will happen. + * @param shouldRetry allows an external entity to gracefully stop these retries, invoking the actionListener's #onFailure method. + * Note that external entities are still required to deregister this listener from the Scheduler. + * @param context the transform's context object. This listener will update the StartUpFailureCount information in the context as it + * encounters errors and retries. 
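For readers of the constructor documented above, the parameter shapes are easier to see with the generics written out. A declarations-only sketch (the real class additionally implements TransformScheduler.Listener, as shown in this file; the sketch class name is illustrative):

import java.util.function.Consumer;
import java.util.function.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.transform.transforms.TransformContext;

// Declarations only; behaviour is in the class added by this diff.
class RetryableStartUpListenerSignatures<Response> {
    final String transformId;                             // all scheduler events must be for this transform
    final Consumer<ActionListener<Response>> action;      // the call retried on every scheduler trigger
    final ActionListener<Response> actionListener;        // eventual success, or failure once retries stop
    final ActionListener<Boolean> retryScheduledListener; // true if the first attempt failed and retries begin
    final Supplier<Boolean> shouldRetry;                  // external switch to end retries gracefully
    final TransformContext context;                       // tracks the start-up failure count

    RetryableStartUpListenerSignatures(
        String transformId,
        Consumer<ActionListener<Response>> action,
        ActionListener<Response> actionListener,
        ActionListener<Boolean> retryScheduledListener,
        Supplier<Boolean> shouldRetry,
        TransformContext context
    ) {
        this.transformId = transformId;
        this.action = action;
        this.actionListener = actionListener;
        this.retryScheduledListener = retryScheduledListener;
        this.shouldRetry = shouldRetry;
        this.context = context;
    }
}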
+ */ + TransformRetryableStartUpListener( + String transformId, + Consumer> action, + ActionListener actionListener, + ActionListener retryScheduledListener, + Supplier shouldRetry, + TransformContext context + ) { + this.transformId = transformId; + this.action = action; + this.actionListener = actionListener; + this.retryScheduledListener = retryScheduledListener; + this.shouldRetry = shouldRetry; + this.context = context; + this.isFirstRun = new AtomicBoolean(true); + this.isRunning = new AtomicBoolean(true); + } + + @Override + public void triggered(TransformScheduler.Event event) { + if (isRunning.get() && transformId.equals(event.transformId())) { + action.accept(ActionListener.wrap(this::actionSucceeded, this::actionFailed)); + } + } + + private void markDone() { + if (isRunning.compareAndSet(true, false)) { + synchronized (context) { + context.resetStartUpFailureCount(); + } + } + } + + private void actionSucceeded(Response r) { + maybeNotifyRetryListener(false); + markDone(); + actionListener.onResponse(r); + } + + private void maybeNotifyRetryListener(boolean response) { + if (isFirstRun.compareAndSet(true, false)) { + retryScheduledListener.onResponse(response); + } + } + + private void actionFailed(Exception e) { + if (shouldRetry.get()) { + maybeNotifyRetryListener(true); + recordError(e); + } else { + maybeNotifyRetryListener(false); + markDone(); + actionListener.onFailure(e); + } + } + + private void recordError(Exception e) { + synchronized (context) { + context.incrementAndGetStartUpFailureCount(e); + } + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index ac81579e8dd71..dbfc30a38f4c3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -71,7 +71,7 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS private final SetOnce indexer = new SetOnce<>(); @SuppressWarnings("this-escape") - public TransformTask( + TransformTask( long id, String type, String action, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java index 86193ef511618..d8e505ad16a49 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java @@ -21,9 +21,11 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions; public class TransformContextTests extends ESTestCase { @@ -41,19 +43,81 @@ public void verifyNoMoreInteractionsOnMocks() { } public void testFailureCount() { - TransformContext context = new TransformContext(null, null, 0, listener); - assertThat(context.incrementAndGetFailureCount(new 
RuntimeException("some_exception")), is(equalTo(1))); + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetFailureCount(someException), is(equalTo(1))); assertThat(context.getFailureCount(), is(equalTo(1))); - assertThat(context.incrementAndGetFailureCount(new IllegalArgumentException("some_other_exception")), is(equalTo(2))); + assertThat(context.getLastFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetFailureCount(someOtherException), is(equalTo(2))); assertThat(context.getFailureCount(), is(equalTo(2))); + assertThat(context.getLastFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + context.resetReasonAndFailureCounter(); assertThat(context.getFailureCount(), is(equalTo(0))); assertThat(context.getLastFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); // Verify that the listener is notified every time the failure count is incremented or reset verify(listener, times(3)).failureCountChanged(); } + private Throwable someException() { + return new RuntimeException("some_exception"); + } + + private Throwable someOtherException() { + return new IllegalArgumentException("some_other_exception"); + } + + public void testStatePersistenceFailureCount() { + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetStatePersistenceFailureCount(someException), is(equalTo(1))); + assertThat(context.getStatePersistenceFailureCount(), is(equalTo(1))); + assertThat(context.getLastStatePersistenceFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetStatePersistenceFailureCount(someOtherException), is(equalTo(2))); + assertThat(context.getStatePersistenceFailureCount(), is(equalTo(2))); + assertThat(context.getLastStatePersistenceFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + + context.resetStatePersistenceFailureCount(); + assertThat(context.getStatePersistenceFailureCount(), is(equalTo(0))); + assertThat(context.getLastStatePersistenceFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); + verifyNoInteractions(listener); + } + + public void testStartUpFailureCount() { + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetStartUpFailureCount(someException), is(equalTo(1))); + assertThat(context.getStartUpFailureCount(), is(equalTo(1))); + assertThat(context.getStartUpFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetStartUpFailureCount(someOtherException), is(equalTo(2))); + assertThat(context.getStartUpFailureCount(), is(equalTo(2))); + assertThat(context.getStartUpFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + + context.resetStartUpFailureCount(); + assertThat(context.getStartUpFailureCount(), is(equalTo(0))); + assertThat(context.getStartUpFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); + verifyNoInteractions(listener); + } + public void testCheckpoint() { 
TransformContext context = new TransformContext(null, null, 13, listener); assertThat(context.getCheckpoint(), is(equalTo(13L))); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java index 3b4604caca5cd..e52428bc94c13 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java @@ -17,6 +17,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -101,6 +102,30 @@ public void testStatusSwitchingAndMultipleFailures() { assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); } + public void testStartUpFailures() { + var task = mock(TransformTask.class); + var context = createTestContext(); + var now = getNow(); + + withIdStateAndContext(task, randomAlphaOfLength(10), context); + assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); + + context.incrementAndGetStartUpFailureCount(new ElasticsearchException("failed to persist")); + + var health = TransformHealthChecker.checkTransform(task); + assertThat(health.getStatus(), equalTo(HealthStatus.YELLOW)); + assertEquals(1, health.getIssues().size()); + assertThat(health.getIssues().get(0).getIssue(), equalTo("Transform task is automatically retrying its startup process")); + assertThat(health.getIssues().get(0).getFirstOccurrence(), greaterThanOrEqualTo(now)); + assertThat(health.getIssues().get(0).getFirstOccurrence(), lessThan(Instant.MAX)); + + IntStream.range(0, 10).forEach(i -> context.incrementAndGetStartUpFailureCount(new ElasticsearchException("failed to persist"))); + assertThat("Start up failures should always be yellow regardless of count", health.getStatus(), equalTo(HealthStatus.YELLOW)); + + context.resetStartUpFailureCount(); + assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); + } + private TransformContext createTestContext() { return new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index b927a248faf31..b5192535e911a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.transform.transforms; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -24,6 +25,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; @@ -31,33 +33,74 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; +import org.elasticsearch.xpack.core.transform.transforms.AuthorizationState; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; +import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.DefaultTransformExtension; import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.IndexBasedTransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.InMemoryTransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.persistence.TransformInternalIndexTests; import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; +import org.junit.AfterClass; +import org.junit.BeforeClass; import java.time.Clock; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class TransformPersistentTasksExecutorTests extends ESTestCase { + private static ThreadPool threadPool; + + @BeforeClass + public static void setUpThreadPool() { + threadPool = new TestThreadPool(TransformPersistentTasksExecutorTests.class.getSimpleName()) { + @Override + public ExecutorService executor(String name) { + return EsExecutors.DIRECT_EXECUTOR_SERVICE; + } + + @Override + public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor name) { + command.run(); + return null; + } + }; + } + + @AfterClass + public static void tearDownThreadPool() { + terminate(threadPool); + } public void testNodeVersionAssignment() { DiscoveryNodes.Builder nodes = buildNodes(false, true, true, true, true); @@ -262,6 +305,88 @@ public void 
testVerifyIndicesPrimaryShardsAreActive() { assertEquals(indexToRemove, result.get(0)); } + public void testNodeOperation() { + var transformsConfigManager = new InMemoryTransformConfigManager(); + var transformScheduler = new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO); + var taskExecutor = buildTaskExecutor(transformServices(transformsConfigManager, transformScheduler)); + + var transformId = "testNodeOperation"; + var params = mockTaskParams(transformId); + + putTransformConfiguration(transformsConfigManager, transformId); + var task = mockTransformTask(); + taskExecutor.nodeOperation(task, params, mock()); + + verify(task).start(isNull(), any()); + } + + private void putTransformConfiguration(TransformConfigManager configManager, String transformId) { + configManager.putTransformConfiguration( + TransformConfigTests.randomTransformConfig(transformId, TimeValue.timeValueMillis(1), TransformConfigVersion.CURRENT), + ActionListener.noop().delegateResponse((l, e) -> fail(e)) + ); + } + + public void testNodeOperationStartupRetry() throws Exception { + var failFirstCall = new AtomicBoolean(true); + var transformsConfigManager = new InMemoryTransformConfigManager() { + @Override + public void getTransformConfiguration(String transformId, ActionListener resultListener) { + if (failFirstCall.compareAndSet(true, false)) { + resultListener.onFailure(new IllegalStateException("Failing first call.")); + } else { + super.getTransformConfiguration(transformId, resultListener); + } + } + }; + + var transformScheduler = new TransformScheduler(Clock.systemUTC(), threadPool, fastRetry(), TimeValue.ZERO); + var taskExecutor = buildTaskExecutor(transformServices(transformsConfigManager, transformScheduler)); + + var transformId = "testNodeOperationStartupRetry"; + var params = mockTaskParams(transformId); + putTransformConfiguration(transformsConfigManager, transformId); + + var task = mockTransformTask(); + taskExecutor.nodeOperation(task, params, mock()); + + // skip waiting for the scheduler to run the task a second time and just rerun it now + transformScheduler.scheduleNow(transformId); + + // verify the retry listener set the state to TransformTaskState.STARTED + IndexerState.STOPPED + verify(task).persistStateToClusterState(argThat(state -> { + assertThat(TransformTaskState.STARTED, equalTo(state.getTaskState())); + assertThat(IndexerState.STOPPED, equalTo(state.getIndexerState())); + return true; + }), any()); + verify(task).start(isNull(), any()); + } + + private Settings fastRetry() { + // must be >= [1s] + return Settings.builder().put(Transform.SCHEDULER_FREQUENCY.getKey(), TimeValue.timeValueSeconds(1)).build(); + } + + private TransformTaskParams mockTaskParams(String transformId) { + var params = mock(TransformTaskParams.class); + when(params.getId()).thenReturn(transformId); + when(params.getFrequency()).thenReturn(TimeValue.timeValueSeconds(1)); + return params; + } + + private TransformTask mockTransformTask() { + var task = mock(TransformTask.class); + when(task.setAuthState(any(AuthorizationState.class))).thenReturn(task); + when(task.setNumFailureRetries(anyInt())).thenReturn(task); + when(task.getParentTaskId()).thenReturn(TaskId.EMPTY_TASK_ID); + when(task.getContext()).thenReturn(mock()); + doAnswer(a -> fail(a.getArgument(0, Throwable.class))).when(task).fail(any(Throwable.class), any(String.class), any()); + when(task.getState()).thenReturn( + new TransformState(TransformTaskState.STOPPED, IndexerState.STOPPED, null, 0, null, null, null, 
false, null) + ); + return task; + } + private void addIndices(Metadata.Builder metadata, RoutingTable.Builder routingTable) { List indices = new ArrayList<>(); indices.add(TransformInternalIndexConstants.AUDIT_INDEX); @@ -415,23 +540,20 @@ private ClusterState buildClusterState(DiscoveryNodes.Builder nodes) { csBuilder.metadata(metadata); return csBuilder.build(); - } private TransformPersistentTasksExecutor buildTaskExecutor() { - ClusterService clusterService = mock(ClusterService.class); - Client client = mock(Client.class); - TransformAuditor mockAuditor = mock(TransformAuditor.class); - IndexBasedTransformConfigManager transformsConfigManager = new IndexBasedTransformConfigManager( - clusterService, - TestIndexNameExpressionResolver.newInstance(), - client, - xContentRegistry() + var transformServices = transformServices( + new InMemoryTransformConfigManager(), + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ); - Clock clock = Clock.systemUTC(); - ThreadPool threadPool = mock(ThreadPool.class); - TransformCheckpointService transformCheckpointService = new TransformCheckpointService( - clock, + return buildTaskExecutor(transformServices); + } + + private TransformServices transformServices(TransformConfigManager configManager, TransformScheduler scheduler) { + var mockAuditor = mock(TransformAuditor.class); + var transformCheckpointService = new TransformCheckpointService( + Clock.systemUTC(), Settings.EMPTY, new ClusterService( Settings.EMPTY, @@ -439,28 +561,29 @@ private TransformPersistentTasksExecutor buildTaskExecutor() { null, (TaskManager) null ), - transformsConfigManager, + configManager, mockAuditor ); - TransformServices transformServices = new TransformServices( - transformsConfigManager, - transformCheckpointService, - mockAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) - ); - - ClusterSettings cSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(Transform.NUM_FAILURE_RETRIES_SETTING)); - when(clusterService.getClusterSettings()).thenReturn(cSettings); - when(clusterService.state()).thenReturn(TransformInternalIndexTests.randomTransformClusterState()); + return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler); + } + private TransformPersistentTasksExecutor buildTaskExecutor(TransformServices transformServices) { return new TransformPersistentTasksExecutor( - client, + mock(Client.class), transformServices, threadPool, - clusterService, + clusterService(), Settings.EMPTY, new DefaultTransformExtension(), TestIndexNameExpressionResolver.newInstance() ); } + + private ClusterService clusterService() { + var clusterService = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(Transform.NUM_FAILURE_RETRIES_SETTING)); + when(clusterService.getClusterSettings()).thenReturn(cSettings); + when(clusterService.state()).thenReturn(TransformInternalIndexTests.randomTransformClusterState()); + return clusterService; + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java new file mode 100644 index 0000000000000..1a2bbfd434455 --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java @@ -0,0 +1,239 @@ 
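
The hunk below adds the new test class TransformRetryableStartUpListenerTests. The listener class itself does not appear in this excerpt, and generic type parameters were dropped when the diff was copied, so the following is only a sketch of the contract the tests exercise: the type argument, the lambda shapes, and the wrapper class name here are assumptions, while the constructor argument order, triggered(Event), and the TransformContext interactions are taken directly from the tests.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.transform.transforms.TransformContext;
import org.elasticsearch.xpack.transform.transforms.TransformRetryableStartUpListener;
import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler;

class RetryableStartUpListenerSketch {
    void wire(TransformContext context) {
        // Assumed <Void> type argument; the tests construct the listener with the diamond operator.
        var listener = new TransformRetryableStartUpListener<Void>(
            "transformId",
            l -> l.onResponse(null),                       // start-up action; this stub succeeds immediately
            ActionListener.wrap(r -> {}, e -> {}),         // completes exactly once, on success or terminal failure
            ActionListener.wrap(willRetry -> {}, e -> {}), // told once whether a retry was scheduled
            () -> true,                                    // shouldRetry gate, consulted after a failure
            context                                        // start-up failure count is incremented/reset here
        );

        // The scheduler drives retries by re-triggering the listener with an event for the same transform id.
        listener.triggered(new TransformScheduler.Event("transformId", System.currentTimeMillis(), System.currentTimeMillis()));
    }
}

On a first-try success the tests expect resetStartUpFailureCount() on the context and a retry-listener value of false; on a retriable failure they expect incrementAndGetStartUpFailureCount(...) followed by a retry-listener value of true, and events carrying a different transform id to be ignored entirely.
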
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.transforms; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.only; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; + +public class TransformRetryableStartUpListenerTests extends ESTestCase { + /** + * When the action succeeds on the first try + * Then we invoked the retryListener with "false" and then invoked the actionListener's onResponse. + */ + public void testFirstRunPasses() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + immediatelyReturn(), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 success and no retries + assertEquals("Response Listener should only be called once.", 1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertFalse("Retries should not be scheduled.", retryResult.get()); + verify(context, only()).resetStartUpFailureCount(); + } + + private Consumer> immediatelyReturn() { + return l -> l.onResponse(null); + } + + private ActionListener responseListener(AtomicInteger result) { + return ActionListener.wrap(r -> { + if (result.compareAndSet(0, 1) == false) { + fail("Response Listener should only be called at most once for every test."); + } + }, e -> { + if (result.compareAndSet(0, -1) == false) { + fail("Response Listener should only be called at most once for every test."); + } + }); + } + + private ActionListener retryListener(AtomicReference result) { + return ActionListener.wrap(result::set, e -> fail("Retry Listener is never expected to fail.")); + } + + private void callThreeTimes(String transformId, TransformRetryableStartUpListener listener) { + listener.triggered(event(transformId)); + listener.triggered(event(transformId)); + listener.triggered(event(transformId)); + } + + private TransformScheduler.Event event(String transformId) { + return new TransformScheduler.Event(transformId, System.currentTimeMillis(), System.currentTimeMillis()); + } + + /** + * When the action fails once then succeeds on the second try + * Then we invoked the retryListener with "true" and then invoked the actionListener's onResponse. 
+ */ + public void testFirstRunFails() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + failOnceThen(immediatelyReturn()), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 retry and 1 success + assertEquals("Response Listener should only be called once.", 1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, times(1)).resetStartUpFailureCount(); + } + + private Consumer> failOnceThen(Consumer> followup) { + var firstRun = new AtomicBoolean(true); + return l -> { + if (firstRun.compareAndSet(true, false)) { + l.onFailure(new IllegalStateException("first call fails")); + } else { + followup.accept(l); + } + }; + } + + /** + * When the TransformRetryableStartUpListener is never invoked + * Then there should be no failures to report + */ + public void testUnusedRetryableIsNotReported() { + var context = mock(TransformContext.class); + + new TransformRetryableStartUpListener<>( + "transformId", + failOnceThen(immediatelyReturn()), + responseListener(), + retryListener(), + () -> true, + context + ); + + verifyNoInteractions(context); + } + + private ActionListener retryListener() { + return retryListener(new AtomicReference<>()); + } + + private ActionListener responseListener() { + return responseListener(new AtomicInteger()); + } + + /** + * Given one transformId + * When we receive an event for another transformId + * Then we should not take any action + */ + public void testWrongTransformIdIsIgnored() { + var correctTransformId = "transformId"; + var incorrectTransformId = "someOtherTransformId"; + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + correctTransformId, + failOnceThen(immediatelyReturn()), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + listener.triggered(event(incorrectTransformId)); + + assertEquals("Response Listener should never be called once.", 0, responseResult.get()); + assertNull("Retry Listener should not be called.", retryResult.get()); + verifyNoInteractions(context); + } + + /** + * Given an action that always fails + * When shouldRetry returns true and then false + * Then we should call the actionListener's onFailure handler + */ + public void testCancelRetries() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + var runTwice = new AtomicBoolean(true); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + alwaysFail(), + responseListener(responseResult), + retryListener(retryResult), + () -> runTwice.compareAndSet(true, false), + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 retry and 1 failure + assertEquals("Response Listener should only be called once.", -1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, 
times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, times(1)).resetStartUpFailureCount(); + } + + private Consumer> alwaysFail() { + return l -> l.onFailure(new IllegalStateException("always fail")); + } + + /** + * Given an action that always fails + * When shouldRetry returns false + * Then we should call the actionListener's onFailure handler and the retryListener with "false" + */ + public void testCancelRetryImmediately() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + alwaysFail(), + responseListener(responseResult), + retryListener(retryResult), + () -> false, + context + ); + + callThreeTimes("transformId", listener); + + // assert no retries and 1 failure + assertEquals("Response Listener should only be called once.", -1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertFalse("Retries should not be scheduled.", retryResult.get()); + verify(context, only()).resetStartUpFailureCount(); + } +} diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 62306a18d946b..a07544ff68c9a 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; @@ -82,7 +81,6 @@ import java.nio.charset.StandardCharsets; import java.time.ZoneId; import java.util.ArrayList; -import java.util.Collection; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -843,15 +841,6 @@ public Query prefixQuery( return wildcardQuery(escapeWildcardSyntax(value) + "*", method, caseInsensitive, context); } - @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - for (Object value : values) { - bq.add(termQuery(value, context), Occur.SHOULD); - } - return new ConstantScoreQuery(bq.build()); - } - @Override public BlockLoader blockLoader(BlockLoaderContext blContext) { if (hasDocValues()) { diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java new file mode 100644 index 0000000000000..3b7e1777b3bc9 --- /dev/null +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.wildcard.mapper; + +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractBuilderTestCase; +import org.elasticsearch.xpack.wildcard.Wildcard; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class TermsQueryTests extends AbstractBuilderTestCase { + + protected Collection> getPlugins() { + return List.of(Wildcard.class); + } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + mapperService.merge("_doc", new CompressedXContent(org.elasticsearch.common.Strings.format(""" + { + "properties": { + "mapped_wildcard": { + "type": "wildcard" + } + } + }""")), MapperService.MergeReason.MAPPING_UPDATE); + } + + public void testSingleDuplicateTerms() throws IOException { + String[] duplicates = new String[1023]; + Arrays.fill(duplicates, "duplicate"); + QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates); + termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext()); + Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext()); + + QueryBuilder queryBuilder = new BoolQueryBuilder().should(new WildcardQueryBuilder("mapped_wildcard", "duplicate")); + queryBuilder = queryBuilder.rewrite(createQueryRewriteContext()); + Query expected = new ConstantScoreQuery(queryBuilder.toQuery(createSearchExecutionContext())); + + assertEquals(expected, actual); + } + + public void testMultiDuplicateTerms() throws IOException { + int numTerms = randomIntBetween(2, 10); + List randomTerms = new ArrayList<>(numTerms); + for (int i = 0; i < numTerms; ++i) { + randomTerms.add(randomAlphaOfLengthBetween(1, 1024)); + } + int totalTerms = randomIntBetween(numTerms * 5, 1023); + String[] duplicates = new String[totalTerms]; + for (int i = 0; i < numTerms; ++i) { + duplicates[i] = randomTerms.get(i); + } + for (int i = numTerms; i < totalTerms; ++i) { + duplicates[i] = randomTerms.get(randomIntBetween(0, numTerms - 1)); + } + + QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates); + termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext()); + Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext()); + + Set ordered = new HashSet<>(randomTerms); + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + for (String randomTerm : ordered) { + QueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder("mapped_wildcard", randomTerm); + wildcardQueryBuilder = wildcardQueryBuilder.rewrite(createQueryRewriteContext()); + boolQueryBuilder.should(wildcardQueryBuilder); + } + QueryBuilder expectedQueryBuilder = boolQueryBuilder.rewrite(createQueryRewriteContext()); + Query expected = new ConstantScoreQuery(expectedQueryBuilder.toQuery(createSearchExecutionContext())); + + assertEquals(expected, actual); + } +} diff --git 
a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 254d12a05d936..d7760eb42a1db 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -731,7 +731,7 @@ private void assertBasicWatchInteractions() throws Exception { Map updateWatch = entityAsMap(client().performRequest(createWatchRequest)); assertThat(updateWatch.get("created"), equalTo(false)); - assertThat(updateWatch.get("_version"), equalTo(2)); + assertThat((int) updateWatch.get("_version"), greaterThanOrEqualTo(2)); Map get = entityAsMap(client().performRequest(new Request("GET", "_watcher/watch/new_watch"))); assertThat(get.get("found"), equalTo(true)); diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 62d6f0a1e34b8..0ec7044ed18ab 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -1,51 +1,15 @@ -import java.nio.file.Path -import java.nio.file.Paths - apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' - -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "peppa" dependencies { javaRestTestImplementation project(':x-pack:plugin:core') javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation(testArtifact(project(xpackModule('security')))) + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') } -normalization { - runtimeClasspath { - ignore 'krb5.conf' - ignore '*.keytab' - } -} - -tasks.register("copyKeytabToGeneratedResources", Copy) { - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") - into "$buildDir/generated-resources/keytabs" - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "HTTP_localhost.keytab") - into "$buildDir/generated-resources/keytabs" - dependsOn ":test:fixtures:krb5kdc-fixture:postProcessFixture" -} - -tasks.register("copyConfToGeneratedResources", Copy) { - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa") - into "$buildDir/generated-resources/conf" - dependsOn ":test:fixtures:krb5kdc-fixture:postProcessFixture" -} - -String realm = "BUILD.ELASTIC.CO" tasks.named("javaRestTest").configure { - dependsOn "copyKeytabToGeneratedResources", "copyConfToGeneratedResources" usesDefaultDistribution() - Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") - Path krb5Conf = Paths.get("${project.buildDir}", "generated-resources", "conf", "krb5.conf") - nonInputProperties.systemProperty 'test.userkt', "peppa@${realm}" - nonInputProperties.systemProperty 'test.userkt.keytab', "${peppaKeytab}" - nonInputProperties.systemProperty 'test.userpwd', "george@${realm}" - nonInputProperties.systemProperty 'test.krb5.conf', "${krb5Conf}" - nonInputProperties.systemProperty 'java.security.krb5.conf', "${krb5Conf}" - systemProperty 'test.userpwd.password', "dino_but_longer_than_14_chars" - systemProperty 'sun.security.krb5.debug', true - classpath += files("$buildDir/generated-resources/keytabs") - classpath += files("$buildDir/generated-resources/conf") + description = "Runs rest tests against an elasticsearch cluster with Kerberos." 
+ // required for krb5kdc-fixture to work + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java b/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java index ff37b9c77735c..3058905548c08 100644 --- a/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java +++ b/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.authc.kerberos; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.elasticsearch.client.Request; @@ -22,12 +24,16 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.ietf.jgss.GSSException; import org.junit.Before; import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.io.IOException; import java.net.InetAddress; @@ -56,15 +62,16 @@ * Demonstrates login by keytab and login by password for given user principal * name using rest client. 
*/ +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class KerberosAuthenticationIT extends ESRestTestCase { private static final String ENABLE_KERBEROS_DEBUG_LOGS_KEY = "test.krb.debug"; - private static final String TEST_USER_WITH_KEYTAB_KEY = "test.userkt"; - private static final String TEST_USER_WITH_KEYTAB_PATH_KEY = "test.userkt.keytab"; - private static final String TEST_USER_WITH_PWD_KEY = "test.userpwd"; - private static final String TEST_USER_WITH_PWD_PASSWD_KEY = "test.userpwd.password"; + private static final String TEST_USER_WITH_KEYTAB_KEY = "peppa@BUILD.ELASTIC.CO"; + private static final String TEST_USER_WITH_PWD_KEY = "george@BUILD.ELASTIC.CO"; + private static final String TEST_USER_WITH_PWD_PASSWD_KEY = "dino_but_longer_than_14_chars"; private static final String TEST_KERBEROS_REALM_NAME = "kerberos"; - @ClassRule + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(Krb5kDcContainer.ProvisioningId.PEPPA); + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the hostname when starting the @@ -81,13 +88,16 @@ public class KerberosAuthenticationIT extends ESRestTestCase { .setting("xpack.security.authc.realms.kerberos.kerberos.keytab.path", "es.keytab") .setting("xpack.security.authc.realms.kerberos.kerberos.krb.debug", "true") .setting("xpack.security.authc.realms.kerberos.kerberos.remove_realm_name", "false") - .systemProperty("java.security.krb5.conf", System.getProperty("test.krb5.conf")) + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) .systemProperty("sun.security.krb5.debug", "true") .user("test_admin", "x-pack-test-password") .user("test_kibana_user", "x-pack-test-password", "kibana_system", false) - .configFile("es.keytab", Resource.fromClasspath("HTTP_localhost.keytab")) + .configFile("es.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) .build(); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(cluster); + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -130,20 +140,19 @@ public void setupRoleMapping() throws IOException { } public void testLoginByKeytab() throws IOException, PrivilegedActionException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_KEYTAB_KEY); - final String keytabPath = System.getProperty(TEST_USER_WITH_KEYTAB_PATH_KEY); - final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); + final String keytabPath = krb5Fixture.getKeytab().toString(); + final boolean enabledDebugLogs = Boolean.parseBoolean(ENABLE_KERBEROS_DEBUG_LOGS_KEY); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( - userPrincipalName, + krb5Fixture.getPrincipal(), keytabPath, enabledDebugLogs ); - executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); + executeRequestAndVerifyResponse(krb5Fixture.getPrincipal(), callbackHandler); } public void testLoginByUsernamePassword() throws IOException, PrivilegedActionException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); - final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); + final String userPrincipalName = TEST_USER_WITH_PWD_KEY; + final String password = TEST_USER_WITH_PWD_PASSWD_KEY; final boolean enabledDebugLogs 
= Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( userPrincipalName, @@ -154,8 +163,8 @@ public void testLoginByUsernamePassword() throws IOException, PrivilegedActionEx } public void testGetOauth2TokenInExchangeForKerberosTickets() throws PrivilegedActionException, GSSException, IOException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); - final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); + final String userPrincipalName = TEST_USER_WITH_PWD_KEY; + final String password = TEST_USER_WITH_PWD_PASSWD_KEY; final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( userPrincipalName, diff --git a/x-pack/test/idp-fixture/build.gradle b/x-pack/test/idp-fixture/build.gradle index 2ef03bf7747cc..4ef8eee8d08a7 100644 --- a/x-pack/test/idp-fixture/build.gradle +++ b/x-pack/test/idp-fixture/build.gradle @@ -1,35 +1,22 @@ -import org.elasticsearch.gradle.Architecture -import org.elasticsearch.gradle.internal.docker.DockerBuildTask -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.cache-test-fixtures' +apply plugin: 'elasticsearch.deploy-test-fixtures' + +dockerFixtures { + idp { + dockerContext = file("src/main/resources/idp") + version = "1.1" + baseImages = ["openjdk:11.0.16-jre"] + } + openldap { + dockerContext = file("src/main/resources/openldap") + version = "1.0" + baseImages = ["osixia/openldap:1.4.0"] + } +} dependencies { testImplementation project(':test:framework') - api project(':test:fixtures:testcontainer-utils') api "junit:junit:${versions.junit}" } - -tasks.withType(DockerBuildTask).configureEach { - noCache = BuildParams.isCi() - push = true //BuildParams.isCi() - getPlatforms().addAll( Architecture.values().collect{ it.dockerPlatform } ) -} - -tasks.register("deployIdpFixtureDockerImages", DockerBuildTask) { - dockerContext.fileValue(file("src/main/resources/idp")) - baseImages = ["openjdk:11.0.16-jre"] - tags = ["docker.elastic.co/elasticsearch-dev/idp-fixture:1.1"] -} - -tasks.register("deployOpenLdapFixtureDockerImages", DockerBuildTask) { - dockerContext.fileValue(file("src/main/resources/openldap")) - baseImages = ["osixia/openldap:1.4.0"] - tags = ["docker.elastic.co/elasticsearch-dev/openldap-fixture:1.0"] -} - -tasks.register("deployFixtureDockerImages") { - dependsOn tasks.withType(DockerBuildTask) -}
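
For reference, the KerberosAuthenticationIT changes above replace the old system-property plumbing with a Testcontainers-managed KDC whose krb5.conf and keytabs are handed to the test cluster lazily. The sketch below is condensed from the diff above, with the realm, user, and debug settings of the real cluster definition omitted; treat it as a minimal illustration of the wiring rather than the full test class.

import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.cluster.local.distribution.DistributionType;
import org.elasticsearch.test.cluster.util.resource.Resource;
import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;

public abstract class KerberosFixtureWiringSketch {
    // The KDC container provisions the principals and writes the keytabs the cluster needs.
    static final Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(Krb5kDcContainer.ProvisioningId.PEPPA);

    // Suppliers defer reading the conf path and keytab until the fixture is actually running.
    static final ElasticsearchCluster cluster = ElasticsearchCluster.local()
        .distribution(DistributionType.DEFAULT)
        .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString())
        .configFile("es.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab()))
        .build();

    // RuleChain enforces ordering: the outer rule (the KDC) starts before and stops after the cluster.
    @ClassRule
    public static final TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(cluster);
}

The matching kerberos-tests build.gradle change is the --add-exports of java.security.jgss/sun.security.krb5 to ALL-UNNAMED, presumably because the fixture touches JDK-internal Kerberos classes, as its inline comment suggests.
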