diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 944230377d077..c5b079c39fbc1 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.14.1", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 5ac361c810627..378a7c5c9c5d2 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.5 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.5 + - label: "{{matrix.image}} / 8.13.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.4 timeout_in_minutes: 300 matrix: setup: @@ -543,10 +543,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.5 + BWC_VERSION: 8.13.4 - - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 + - label: "{{matrix.image}} / 8.14.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.1 timeout_in_minutes: 300 matrix: setup: @@ -559,7 +559,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.14.0 + BWC_VERSION: 8.14.1 - label: "{{matrix.image}} / 8.15.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7ba46f0f0951c..1726f0f29fa92 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -591,8 +591,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.13.5 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.5#bwcTest + - label: 8.13.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -601,7 +601,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.13.5 + BWC_VERSION: 8.13.4 retry: automatic: - exit_status: "-1" @@ -610,8 +610,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.14.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest + - label: 8.14.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.1#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -620,7 +620,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.14.0 + BWC_VERSION: 8.14.1 retry: automatic: - exit_status: "-1" @@ -714,7 +714,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.14.1", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -762,7 +762,7 @@ steps: - openjdk21 - openjdk22 - openjdk23 - BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.14.1", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git 
a/.ci/bwcVersions b/.ci/bwcVersions index b9afdcf23b858..3aa17cc370296 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,6 +30,6 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.5" - - "8.14.0" + - "8.13.4" + - "8.14.1" - "8.15.0" diff --git a/.ci/init.gradle b/.ci/init.gradle index 4b2cbd1907ca0..3e1f23804cf98 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -91,8 +91,8 @@ if (USE_ARTIFACTORY) { } gradle.settingsEvaluated { settings -> - settings.pluginManager.withPlugin("com.gradle.enterprise") { - settings.gradleEnterprise { + settings.pluginManager.withPlugin("com.gradle.develocity") { + settings.develocity { server = 'https://gradle-enterprise.elastic.co' } } diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 213e4e93bc81d..f802829f6ec8a 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - "7.17.22" - - "8.13.5" - - "8.14.0" + - "8.14.1" - "8.15.0" diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 50491da4d39aa..7335dfbd8f239 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -15,6 +15,8 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -26,7 +28,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; @@ -84,7 +86,17 @@ public class EvalBenchmark { } @Param( - { "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending", "rlike" } + { + "abs", + "add", + "add_double", + "date_trunc", + "equal_to_const", + "long_equal_to_long", + "long_equal_to_int", + "mv_min", + "mv_min_ascending", + "rlike" } ) public String operation; @@ -101,25 +113,32 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { case "add" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( - new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataTypes.LONG)), + new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataType.LONG)), layout(longField) ).get(driverContext); } + case "add_double" -> { + FieldAttribute doubleField = doubleField(); + yield EvalMapper.toEvaluator( + new Add(Source.EMPTY, doubleField, new Literal(Source.EMPTY, 1D, DataType.DOUBLE)), + layout(doubleField) + ).get(driverContext); + } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( Source.EMPTY, "timestamp", - new EsField("timestamp", DataTypes.DATETIME, Map.of(), true) + new EsField("timestamp", DataType.DATETIME, Map.of(), 
true) ); yield EvalMapper.toEvaluator( - new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), DataTypes.TIME_DURATION), timestamp), + new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), DataType.TIME_DURATION), timestamp), layout(timestamp) ).get(driverContext); } case "equal_to_const" -> { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator( - new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataTypes.LONG)), + new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataType.LONG)), layout(longField) ).get(driverContext); } @@ -147,15 +166,19 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { } private static FieldAttribute longField() { - return new FieldAttribute(Source.EMPTY, "long", new EsField("long", DataTypes.LONG, Map.of(), true)); + return new FieldAttribute(Source.EMPTY, "long", new EsField("long", DataType.LONG, Map.of(), true)); + } + + private static FieldAttribute doubleField() { + return new FieldAttribute(Source.EMPTY, "double", new EsField("double", DataType.DOUBLE, Map.of(), true)); } private static FieldAttribute intField() { - return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataTypes.INTEGER, Map.of(), true)); + return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataType.INTEGER, Map.of(), true)); } private static FieldAttribute keywordField() { - return new FieldAttribute(Source.EMPTY, "keyword", new EsField("keyword", DataTypes.KEYWORD, Map.of(), true)); + return new FieldAttribute(Source.EMPTY, "keyword", new EsField("keyword", DataType.KEYWORD, Map.of(), true)); } private static Layout layout(FieldAttribute... fields) { @@ -182,6 +205,16 @@ private static void checkExpected(String operation, Page actual) { } } } + case "add_double" -> { + DoubleVector v = actual.getBlock(1).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + if (v.getDouble(i) != i * 100_000 + 1D) { + throw new AssertionError( + "[" + operation + "] expected [" + (i * 100_000 + 1D) + "] but was [" + v.getDouble(i) + "]" + ); + } + } + } case "date_trunc" -> { LongVector v = actual.getBlock(1).asVector(); long oneDay = TimeValue.timeValueHours(24).millis(); @@ -239,6 +272,13 @@ private static Page page(String operation) { } yield new Page(builder.build()); } + case "add_double" -> { + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); + for (int i = 0; i < BLOCK_LENGTH; i++) { + builder.appendDouble(i * 100_000D); + } + yield new Page(builder.build()); + } case "long_equal_to_long" -> { var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java index 70e9fe424e77b..68b31481e17f3 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.benchmark.index.mapper; import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.util.Accountable; import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -21,10 +22,12 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; 
import org.elasticsearch.index.analysis.LowercaseNormalizer; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.script.Script; @@ -52,6 +55,13 @@ public static MapperService create(String mappings) { MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); SimilarityService similarityService = new SimilarityService(indexSettings, null, Map.of()); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) {} + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) {} + }); MapperService mapperService = new MapperService( () -> TransportVersion.current(), indexSettings, @@ -73,6 +83,7 @@ public T compile(Script script, ScriptContext scriptContext) { throw new UnsupportedOperationException(); } }, + bitsetFilterCache::getBitSetProducer, MapperMetrics.NOOP ); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java index 14f6fe6501a73..cff15d9c36d34 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java @@ -189,6 +189,7 @@ public T compile(Script script, ScriptContext scriptContext) { throw new UnsupportedOperationException(); } }, + query -> { throw new UnsupportedOperationException(); }, MapperMetrics.NOOP ); diff --git a/branches.json b/branches.json index daf6d249f7268..2794b545facc6 100644 --- a/branches.json +++ b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.14" }, - { - "branch": "8.13" - }, { "branch": "7.17" } diff --git a/build-conventions/build.gradle b/build-conventions/build.gradle index cd9a548a9901f..94b0312d0d5d3 100644 --- a/build-conventions/build.gradle +++ b/build-conventions/build.gradle @@ -8,6 +8,16 @@ import org.gradle.plugins.ide.eclipse.model.SourceFolder + +buildscript { + repositories { + maven { + url 'https://jitpack.io' + } + mavenCentral() + } +} + plugins { id 'java-gradle-plugin' id 'java-test-fixtures' @@ -59,6 +69,10 @@ gradlePlugin { } repositories { + maven { + url 'https://jitpack.io' + } + mavenCentral() gradlePluginPortal() } diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 52e72d973f2ed..84e56bbaf03ad 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -257,6 +257,9 @@ tasks.named('licenseHeaders').configure { *****************************************************************************/ repositories { + maven { + url 'https://jitpack.io' + } mavenCentral() gradlePluginPortal() } diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index fcbbad6dd644c..515ab9d5f1822 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ 
b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip +distributionSha256Sum=f8b4f4772d302c8ff580bc40d0f56e715de69b163546944f787c87abf209c961 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/settings.gradle b/build-tools-internal/settings.gradle index 6423750872ca2..1b4fb1215a59d 100644 --- a/build-tools-internal/settings.gradle +++ b/build-tools-internal/settings.gradle @@ -1,5 +1,13 @@ pluginManagement { - includeBuild "../build-conventions" + repositories { + maven { + url 'https://jitpack.io' + } + mavenCentral() + gradlePluginPortal() + } + + includeBuild "../build-conventions" includeBuild "../build-tools" } @@ -9,4 +17,4 @@ dependencyResolutionManagement { from(files("../gradle/build.versions.toml")) } } -} \ No newline at end of file +} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 67123119f7cd9..f0a7b1a6d0b1c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -12,168 +12,179 @@ import java.time.LocalDateTime; import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.info.BuildParams -import org.gradle.initialization.BuildRequestMetaData -buildScan { - URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null - String buildKiteUrl = System.getenv('BUILDKITE_BUILD_URL') ? System.getenv('BUILDKITE_BUILD_URL') : null +import java.lang.management.ManagementFactory +import java.time.LocalDateTime - // Automatically publish scans from Elasticsearch CI - if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') { - publishAlways() - buildScan.server = 'https://gradle-enterprise.elastic.co' - } +develocity { - background { - tag OS.current().name() - tag Architecture.current().name() + buildScan { + URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null + String buildKiteUrl = System.getenv('BUILDKITE_BUILD_URL') ? 
System.getenv('BUILDKITE_BUILD_URL') : null - // Tag if this build is run in FIPS mode - if (BuildParams.inFipsJvm) { - tag 'FIPS' + // Automatically publish scans from Elasticsearch CI + if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') { + publishing.onlyIf { true } + server = 'https://gradle-enterprise.elastic.co' } - // Jenkins-specific build scan metadata - if (jenkinsUrl) { - // Disable async upload in CI to ensure scan upload completes before CI agent is terminated - uploadInBackground = false - - String buildNumber = System.getenv('BUILD_NUMBER') - String buildUrl = System.getenv('BUILD_URL') - String jobName = System.getenv('JOB_NAME') - String nodeName = System.getenv('NODE_NAME') - String jobBranch = System.getenv('ghprbTargetBranch') ?: System.getenv('JOB_BRANCH') - - // Link to Jenkins worker logs and system metrics - if (nodeName) { - link 'System logs', "https://ci-stats.elastic.co/app/infra#/logs?&logFilter=(expression:'host.name:${nodeName}',kind:kuery)" - buildFinished { - link 'System metrics', "https://ci-stats.elastic.co/app/metrics/detail/host/${nodeName}" - } + + background { + tag OS.current().name() + tag Architecture.current().name() + + // Tag if this build is run in FIPS mode + if (BuildParams.inFipsJvm) { + tag 'FIPS' } - // Parse job name in the case of matrix builds - // Matrix job names come in the form of "base-job-name/matrix_param1=value1,matrix_param2=value2" - def splitJobName = jobName.split('/') - if (splitJobName.length > 1 && splitJobName.last() ==~ /^([a-zA-Z0-9_\-]+=[a-zA-Z0-9_\-&\.]+,?)+$/) { - def baseJobName = splitJobName.dropRight(1).join('/') - tag baseJobName - tag splitJobName.last() - value 'Job Name', baseJobName - def matrixParams = splitJobName.last().split(',') - matrixParams.collect { it.split('=') }.each { param -> - value "MATRIX_${param[0].toUpperCase()}", param[1] + // Jenkins-specific build scan metadata + if (jenkinsUrl) { + // Disable async upload in CI to ensure scan upload completes before CI agent is terminated + uploadInBackground = false + + String buildNumber = System.getenv('BUILD_NUMBER') + String buildUrl = System.getenv('BUILD_URL') + String jobName = System.getenv('JOB_NAME') + String nodeName = System.getenv('NODE_NAME') + String jobBranch = System.getenv('ghprbTargetBranch') ?: System.getenv('JOB_BRANCH') + + // Link to Jenkins worker logs and system metrics + if (nodeName) { + link 'System logs', "https://ci-stats.elastic.co/app/infra#/logs?&logFilter=(expression:'host.name:${nodeName}',kind:kuery)" + buildFinished { + link 'System metrics', "https://ci-stats.elastic.co/app/metrics/detail/host/${nodeName}" + } } - } else { - tag jobName - value 'Job Name', jobName - } - tag 'CI' - link 'CI Build', buildUrl - link 'GCP Upload', "https://console.cloud.google.com/storage/browser/_details/elasticsearch-ci-artifacts/jobs/${URLEncoder.encode(jobName, "UTF-8")}/build/${buildNumber}.tar.bz2" - value 'Job Number', buildNumber - if (jobBranch) { - tag jobBranch - value 'Git Branch', jobBranch - } + // Parse job name in the case of matrix builds + // Matrix job names come in the form of "base-job-name/matrix_param1=value1,matrix_param2=value2" + def splitJobName = jobName.split('/') + if (splitJobName.length > 1 && splitJobName.last() ==~ /^([a-zA-Z0-9_\-]+=[a-zA-Z0-9_\-&\.]+,?)+$/) { + def baseJobName = splitJobName.dropRight(1).join('/') + tag baseJobName + tag splitJobName.last() + value 'Job Name', baseJobName + def matrixParams = 
splitJobName.last().split(',') + matrixParams.collect { it.split('=') }.each { param -> + value "MATRIX_${param[0].toUpperCase()}", param[1] + } + } else { + tag jobName + value 'Job Name', jobName + } - System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each { - value 'Jenkins Worker Label', it - } + tag 'CI' + link 'CI Build', buildUrl + link 'GCP Upload', + "https://console.cloud.google.com/storage/browser/_details/elasticsearch-ci-artifacts/jobs/${URLEncoder.encode(jobName, "UTF-8")}/build/${buildNumber}.tar.bz2" + value 'Job Number', buildNumber + if (jobBranch) { + tag jobBranch + value 'Git Branch', jobBranch + } - // Add SCM information - def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null - if (isPrBuild) { - value 'Git Commit ID', System.getenv('ghprbActualCommit') - tag "pr/${System.getenv('ghprbPullId')}" - tag 'pull-request' - link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}" - link 'Pull Request', System.getenv('ghprbPullLink') - } else { - value 'Git Commit ID', BuildParams.gitRevision - link 'Source', "https://github.com/elastic/elasticsearch/tree/${BuildParams.gitRevision}" - } - } else if (buildKiteUrl) { //Buildkite-specific build scan metadata - // Disable async upload in CI to ensure scan upload completes before CI agent is terminated - uploadInBackground = false - - def branch = System.getenv('BUILDKITE_PULL_REQUEST_BASE_BRANCH') ?: System.getenv('BUILDKITE_BRANCH') - def repoMatcher = System.getenv('BUILDKITE_REPO') =~ /(https:\/\/github\.com\/|git@github\.com:)(\S+)\.git/ - def repository = repoMatcher.matches() ? repoMatcher.group(2) : "" - def jobLabel = System.getenv('BUILDKITE_LABEL') ?: '' - def jobName = safeName(jobLabel) - - tag 'CI' - link 'CI Build', "${buildKiteUrl}#${System.getenv('BUILDKITE_JOB_ID')}" - value 'Job Number', System.getenv('BUILDKITE_BUILD_NUMBER') - value 'Build ID', System.getenv('BUILDKITE_BUILD_ID') - value 'Job ID', System.getenv('BUILDKITE_JOB_ID') - - value 'Pipeline', System.getenv('BUILDKITE_PIPELINE_SLUG') - tag System.getenv('BUILDKITE_PIPELINE_SLUG') - - value 'Job Name', jobName - tag jobName - if (jobLabel.contains("/")) { - jobLabel.split("/").collect {safeName(it) }.each {matrix -> - tag matrix + System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each { + value 'Jenkins Worker Label', it } - } - def uptime = ManagementFactory.getRuntimeMXBean().getUptime() / 1000; - def metricsStartTime = LocalDateTime.now().minusSeconds(uptime.longValue()).minusMinutes(15).toString() - def metricsEndTime = LocalDateTime.now().plusMinutes(15).toString() + // Add SCM information + def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null + if (isPrBuild) { + value 'Git Commit ID', System.getenv('ghprbActualCommit') + tag "pr/${System.getenv('ghprbPullId')}" + tag 'pull-request' + link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}" + link 'Pull Request', System.getenv('ghprbPullLink') + } else { + value 'Git Commit ID', BuildParams.gitRevision + link 'Source', "https://github.com/elastic/elasticsearch/tree/${BuildParams.gitRevision}" + } + } else if (buildKiteUrl) { //Buildkite-specific build scan metadata + // Disable async upload in CI to ensure scan upload completes before CI agent is terminated + uploadInBackground = false - link 'Agent Metrics', 
"https://es-buildkite-agents.elastic.dev/app/metrics/detail/host/${System.getenv('BUILDKITE_AGENT_NAME')}?_a=(time:(from:%27${metricsStartTime}Z%27,interval:%3E%3D1m,to:%27${metricsEndTime}Z%27))" - link 'Agent Logs', "https://es-buildkite-agents.elastic.dev/app/logs/stream?logFilter=(filters:!(),query:(language:kuery,query:%27host.name:%20${System.getenv('BUILDKITE_AGENT_NAME')}%27),timeRange:(from:%27${metricsStartTime}Z%27,to:%27${metricsEndTime}Z%27))" + def branch = System.getenv('BUILDKITE_PULL_REQUEST_BASE_BRANCH') ?: System.getenv('BUILDKITE_BRANCH') + def repoMatcher = System.getenv('BUILDKITE_REPO') =~ /(https:\/\/github\.com\/|git@github\.com:)(\S+)\.git/ + def repository = repoMatcher.matches() ? repoMatcher.group(2) : "" + def jobLabel = System.getenv('BUILDKITE_LABEL') ?: '' + def jobName = safeName(jobLabel) - if (branch) { - tag branch - value 'Git Branch', branch - } + tag 'CI' + link 'CI Build', "${buildKiteUrl}#${System.getenv('BUILDKITE_JOB_ID')}" + value 'Job Number', System.getenv('BUILDKITE_BUILD_NUMBER') + value 'Build ID', System.getenv('BUILDKITE_BUILD_ID') + value 'Job ID', System.getenv('BUILDKITE_JOB_ID') - // Add SCM information - def prId = System.getenv('BUILDKITE_PULL_REQUEST') - if (prId != 'false') { - def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://") - value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT') - tag "pr/${prId}" - tag 'pull-request' - link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" - link 'Pull Request', "https://github.com/${repository}/pull/${prId}" - } else { - value 'Git Commit ID', BuildParams.gitRevision - link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" - } + value 'Pipeline', System.getenv('BUILDKITE_PIPELINE_SLUG') + tag System.getenv('BUILDKITE_PIPELINE_SLUG') - buildFinished { result -> - - buildScanPublished { scan -> - // Attach build scan link as build metadata - // See: https://buildkite.com/docs/pipelines/build-meta-data - new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") - .start() - .waitFor() - - // Add a build annotation - // See: https://buildkite.com/docs/agent/v3/cli-annotate - def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failure ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" - def process = [ - 'buildkite-agent', - 'annotate', - '--context', - result.failure ? 'gradle-build-scans-failed' : 'gradle-build-scans', - '--append', - '--style', - result.failure ? 'error' : 'info' - ].execute() - process.withWriter { it.write(body) } // passing the body in as an argument has issues on Windows, so let's use stdin of the process instead - process.waitFor() + value 'Job Name', jobName + tag jobName + if (jobLabel.contains("/")) { + jobLabel.split("/").collect { safeName(it) }.each { matrix -> + tag matrix + } } + + def uptime = ManagementFactory.getRuntimeMXBean().getUptime() / 1000; + def metricsStartTime = LocalDateTime.now().minusSeconds(uptime.longValue()).minusMinutes(15).toString() + def metricsEndTime = LocalDateTime.now().plusMinutes(15).toString() + + link 'Agent Metrics', + "https://es-buildkite-agents.elastic.dev/app/metrics/detail/host/${System.getenv('BUILDKITE_AGENT_NAME')}?_a=(time:(from:%27${metricsStartTime}Z%27,interval:%3E%3D1m,to:%27${metricsEndTime}Z%27))" + link 'Agent Logs', + "https://es-buildkite-agents.elastic.dev/app/logs/stream?logFilter=(filters:!(),query:(language:kuery,query:%27host.name:%20${System.getenv('BUILDKITE_AGENT_NAME')}%27),timeRange:(from:%27${metricsStartTime}Z%27,to:%27${metricsEndTime}Z%27))" + + if (branch) { + tag branch + value 'Git Branch', branch + } + + // Add SCM information + def prId = System.getenv('BUILDKITE_PULL_REQUEST') + if (prId != 'false') { + def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://") + value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT') + tag "pr/${prId}" + tag 'pull-request' + link 'Source', "${prBaseUrl}/tree/${System.getenv('BUILDKITE_COMMIT')}" + link 'Pull Request', "https://github.com/${repository}/pull/${prId}" + } else { + value 'Git Commit ID', BuildParams.gitRevision + link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" + } + + buildFinished { result -> + + buildScanPublished { scan + -> + // Attach build scan link as build metadata + // See: https://buildkite.com/docs/pipelines/build-meta-data + new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") + .start() + .waitFor() + + // Add a build annotation + // See: https://buildkite.com/docs/agent/v3/cli-annotate + def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + def process = [ + 'buildkite-agent', + 'annotate', + '--context', + result.failures ? 'gradle-build-scans-failed' : 'gradle-build-scans', + '--append', + '--style', + result.failures ? 'error' : 'info' + ].execute() + process.withWriter { it.write(body) } + // passing the body in as an argument has issues on Windows, so let's use stdin of the process instead + process.waitFor() + } + } + } else { + tag 'LOCAL' } - } else { - tag 'LOCAL' } } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle b/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle index 5512b06d0ab8b..ff9b6fe7a526d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.bwc-test.gradle @@ -33,7 +33,8 @@ tasks.register("bwcTest") { plugins.withType(ElasticsearchTestBasePlugin) { tasks.withType(Test).matching { it.name ==~ /v[0-9\.]+#.*/ }.configureEach { - onlyIf("BWC tests enabled") { project.bwc_tests_enabled } + boolean bwcEnabled = project.bwc_tests_enabled + onlyIf("BWC tests enabled") { bwcEnabled } nonInputProperties.systemProperty 'tests.bwc', 'true' } } @@ -50,5 +51,5 @@ plugins.withType(InternalJavaRestTestPlugin) { } } -tasks.matching { it.name.equals("check") }.configureEach {dependsOn(bwcTestSnapshots) } -tasks.matching { it.name.equals("test") }.configureEach {enabled = false} +tasks.matching { it.name.equals("check") }.configureEach { dependsOn(bwcTestSnapshots) } +tasks.matching { it.name.equals("test") }.configureEach { enabled = false } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index 3d6d37575eca9..7010ed92d4c57 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -20,6 +20,9 @@ import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; import org.gradle.api.tasks.TaskProvider; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainService; @@ -41,6 +44,7 @@ public class BwcSetupExtension { private static final Version BUILD_TOOL_MINIMUM_VERSION = Version.fromString("7.14.0"); private final Project project; private final ObjectFactory objectFactory; + private final ProviderFactory providerFactory; private final JavaToolchainService toolChainService; private final Provider unreleasedVersionInfo; @@ -49,12 +53,14 @@ public class BwcSetupExtension { public BwcSetupExtension( Project project, ObjectFactory objectFactory, + ProviderFactory providerFactory, JavaToolchainService toolChainService, Provider unreleasedVersionInfo, Provider checkoutDir ) { this.project = project; this.objectFactory = objectFactory; + this.providerFactory = providerFactory; this.toolChainService = toolChainService; this.unreleasedVersionInfo = unreleasedVersionInfo; this.checkoutDir = checkoutDir; @@ -65,11 +71,26 @@ TaskProvider bwcTask(String name, Action configuration) } TaskProvider bwcTask(String name, Action configuration, boolean useUniqueUserHome) { - return createRunBwcGradleTask(project, name, configuration, useUniqueUserHome); + return 
createRunBwcGradleTask( + project, + checkoutDir, + providerFactory, + unreleasedVersionInfo, + objectFactory, + toolChainService, + name, + configuration, + useUniqueUserHome + ); } - private TaskProvider createRunBwcGradleTask( + private static TaskProvider createRunBwcGradleTask( Project project, + Provider checkoutDir, + ProviderFactory providerFactory, + Provider unreleasedVersionInfo, + ObjectFactory objectFactory, + JavaToolchainService toolChainService, String name, Action configAction, boolean useUniqueUserHome @@ -78,10 +99,10 @@ private TaskProvider createRunBwcGradleTask( loggedExec.dependsOn("checkoutBwcBranch"); loggedExec.getWorkingDir().set(checkoutDir.get()); - loggedExec.getEnvironment().put("JAVA_HOME", unreleasedVersionInfo.zip(checkoutDir, (version, checkoutDir) -> { - String minimumCompilerVersion = readFromFile(new File(checkoutDir, minimumCompilerVersionPath(version.version()))); - return getJavaHome(Integer.parseInt(minimumCompilerVersion)); - })); + loggedExec.getNonTrackedEnvironment().put("JAVA_HOME", providerFactory.of(JavaHomeValueSource.class, spec -> { + spec.getParameters().getVersion().set(unreleasedVersionInfo.map(it -> it.version())); + spec.getParameters().getCheckoutDir().set(checkoutDir); + }).flatMap(s -> getJavaHome(objectFactory, toolChainService, Integer.parseInt(s)))); if (BuildParams.isCi() && OS.current() != OS.WINDOWS) { // TODO: Disabled for now until we can figure out why files are getting corrupted @@ -137,10 +158,13 @@ private TaskProvider createRunBwcGradleTask( }); } - private String minimumCompilerVersionPath(Version bwcVersion) { - return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION)) - ? "build-tools-internal/" + MINIMUM_COMPILER_VERSION_PATH - : "buildSrc/" + MINIMUM_COMPILER_VERSION_PATH; + /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ + private static Provider getJavaHome(ObjectFactory objectFactory, JavaToolchainService toolChainService, final int version) { + Property value = objectFactory.property(JavaLanguageVersion.class).value(JavaLanguageVersion.of(version)); + return toolChainService.launcherFor(javaToolchainSpec -> { + javaToolchainSpec.getLanguageVersion().value(value); + javaToolchainSpec.getVendor().set(JvmVendorSpec.ORACLE); + }).map(launcher -> launcher.getMetadata().getInstallationPath().getAsFile().getAbsolutePath()); } private static String readFromFile(File file) { @@ -151,13 +175,25 @@ private static String readFromFile(File file) { } } - /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ - public String getJavaHome(final int version) { - Property value = objectFactory.property(JavaLanguageVersion.class).value(JavaLanguageVersion.of(version)); - return toolChainService.launcherFor(javaToolchainSpec -> { - javaToolchainSpec.getLanguageVersion().value(value); - javaToolchainSpec.getVendor().set(JvmVendorSpec.ORACLE); - }).get().getMetadata().getInstallationPath().getAsFile().getAbsolutePath(); - } + public static abstract class JavaHomeValueSource implements ValueSource { + + private String minimumCompilerVersionPath(Version bwcVersion) { + return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION)) + ? 
"build-tools-internal/" + MINIMUM_COMPILER_VERSION_PATH + : "buildSrc/" + MINIMUM_COMPILER_VERSION_PATH; + } + @Override + public String obtain() { + return readFromFile( + new File(getParameters().getCheckoutDir().get(), minimumCompilerVersionPath(getParameters().getVersion().get())) + ); + } + + public interface Params extends ValueSourceParameters { + Property getVersion(); + + Property getCheckoutDir(); + } + } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index ed2dfb577e038..d344b4694a5b5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -30,6 +30,7 @@ import org.gradle.api.tasks.testing.Test; import java.io.File; +import java.util.List; import java.util.Map; import static org.elasticsearch.gradle.util.FileUtils.mkdirs; @@ -100,6 +101,7 @@ public void execute(Task t) { "-Xmx" + System.getProperty("tests.heap.size", "512m"), "-Xms" + System.getProperty("tests.heap.size", "512m"), "-Djava.security.manager=allow", + "--add-opens=java.base/java.util=ALL-UNNAMED", // TODO: only open these for mockito when it is modularized "--add-opens=java.base/java.security.cert=ALL-UNNAMED", "--add-opens=java.base/java.nio.channels=ALL-UNNAMED", @@ -199,5 +201,29 @@ public void execute(Task t) { } }); }); + configureImmutableCollectionsPatch(project); + } + + private void configureImmutableCollectionsPatch(Project project) { + String patchProject = ":test:immutable-collections-patch"; + if (project.findProject(patchProject) == null) { + return; // build tests may not have this project, just skip + } + String configurationName = "immutableCollectionsPatch"; + FileCollection patchedFileCollection = project.getConfigurations() + .create(configurationName, config -> config.setCanBeConsumed(false)); + var deps = project.getDependencies(); + deps.add(configurationName, deps.project(Map.of("path", patchProject, "configuration", "patch"))); + project.getTasks().withType(Test.class).matching(task -> task.getName().equals("test")).configureEach(test -> { + test.getInputs().files(patchedFileCollection); + test.systemProperty("tests.hackImmutableCollections", "true"); + test.getJvmArgumentProviders() + .add( + () -> List.of( + "--patch-module=java.base=" + patchedFileCollection.getSingleFile() + "/java.base", + "--add-opens=java.base/java.util=ALL-UNNAMED" + ) + ); + }); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java index 71c76b2045007..7add1e615f577 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java @@ -93,13 +93,6 @@ public void execute(Task task) { String remoteRepo = remote.get(); // for testing only we can override the base remote url String remoteRepoUrl = providerFactory.systemProperty("testRemoteRepo") - .orElse( - providerFactory.provider( - () -> addRemote.getExtensions().getExtraProperties().has("remote") - ? 
addRemote.getExtensions().getExtraProperties().get("remote").toString() - : null - ) - ) .getOrElse("https://github.com/" + remoteRepo + "/" + rootProjectName); spec.commandLine("git", "remote", "add", remoteRepo, remoteRepoUrl); }); @@ -213,6 +206,7 @@ private String maybeAlignedRefSpec(Logger logger, String defaultRefSpec) { private void writeFile(File file, String content) { try { + file.getParentFile().mkdirs(); Files.writeString(file.toPath(), content, CREATE, TRUNCATE_EXISTING); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index bfc38e13043b9..d10cecf7fa50e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -99,8 +99,8 @@ private void configureGeneralTaskDefaults(Project project) { project.getTasks().withType(AbstractCopyTask.class).configureEach(t -> { t.dependsOn(project.getTasks().withType(EmptyDirTask.class)); t.setIncludeEmptyDirs(true); - t.setDirMode(0755); - t.setFileMode(0644); + t.dirPermissions(permissions -> permissions.unix(0755)); + t.filePermissions(permissions -> permissions.unix(0644)); }); // common config across all archives diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index f727dc165a8a9..a2247adcf7b9e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -16,6 +16,7 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.plugins.JvmToolchainsPlugin; import org.gradle.api.provider.Provider; @@ -63,15 +64,39 @@ public void apply(Project project) { project.getPlugins().apply(JvmToolchainsPlugin.class); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); BuildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { - configureBwcProject(project.project(unreleasedVersion.gradleProjectPath()), unreleasedVersion); + configureBwcProject( + project.project(unreleasedVersion.gradleProjectPath()), + unreleasedVersion, + providerFactory, + objectFactory, + toolChainService + ); }); } - private void configureBwcProject(Project project, BwcVersions.UnreleasedVersionInfo versionInfo) { + private static void configureBwcProject( + Project project, + BwcVersions.UnreleasedVersionInfo versionInfo, + ProviderFactory providerFactory, + ObjectFactory objectFactory, + JavaToolchainService toolChainService + ) { + ProjectLayout layout = project.getLayout(); Provider versionInfoProvider = providerFactory.provider(() -> versionInfo); - Provider checkoutDir = versionInfoProvider.map(info -> new File(project.getBuildDir(), "bwc/checkout-" + info.branch())); + Provider checkoutDir = versionInfoProvider.map( + info -> new 
File(layout.getBuildDirectory().get().getAsFile(), "bwc/checkout-" + info.branch()) + ); BwcSetupExtension bwcSetupExtension = project.getExtensions() - .create("bwcSetup", BwcSetupExtension.class, project, objectFactory, toolChainService, versionInfoProvider, checkoutDir); + .create( + "bwcSetup", + BwcSetupExtension.class, + project, + objectFactory, + providerFactory, + toolChainService, + versionInfoProvider, + checkoutDir + ); BwcGitExtension gitExtension = project.getPlugins().apply(InternalBwcGitPlugin.class).getGitExtension(); Provider bwcVersion = versionInfoProvider.map(info -> info.version()); gitExtension.setBwcVersion(versionInfoProvider.map(info -> info.version())); @@ -157,7 +182,7 @@ private void configureBwcProject(Project project, BwcVersions.UnreleasedVersionI } } - private void registerBwcDistributionArtifacts(Project bwcProject, DistributionProject distributionProject) { + private static void registerBwcDistributionArtifacts(Project bwcProject, DistributionProject distributionProject) { String projectName = distributionProject.name; String buildBwcTask = buildBwcTaskName(projectName); @@ -174,7 +199,11 @@ private void registerBwcDistributionArtifacts(Project bwcProject, DistributionPr } } - private void registerDistributionArchiveArtifact(Project bwcProject, DistributionProject distributionProject, String buildBwcTask) { + private static void registerDistributionArchiveArtifact( + Project bwcProject, + DistributionProject distributionProject, + String buildBwcTask + ) { File distFile = distributionProject.expectedBuildArtifact.distFile; String artifactFileName = distFile.getName(); String artifactName = artifactFileName.contains("oss") ? "elasticsearch-oss" : "elasticsearch"; @@ -363,5 +392,4 @@ private static class DistributionProjectArtifact { this.expandedDistDir = expandedDistDir; } } - } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 16c286bfdd3f2..756d1ea48610b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -172,7 +172,6 @@ private void createTestTask(Project project, SourceSet sourceSet, int javaVersio testTask.getJavaLauncher() .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); } - }); project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java index 29c7dfd422547..52000e8c8fd71 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java @@ -145,7 +145,7 @@ private void visitSymbolicLink(final FileCopyDetailsInternal details) { visitedSymbolicLinks.add(details.getFile()); final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString(), TarConstants.LF_SYMLINK); entry.setModTime(getModTime(details)); - entry.setMode(UnixStat.LINK_FLAG | details.getMode()); + entry.setMode(UnixStat.LINK_FLAG | details.getPermissions().toUnixNumeric()); try { 
entry.setLinkName(Files.readSymbolicLink(details.getFile().toPath()).toString()); tar.putArchiveEntry(entry); @@ -158,7 +158,7 @@ private void visitSymbolicLink(final FileCopyDetailsInternal details) { private void visitDirectory(final FileCopyDetailsInternal details) { final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString() + "/"); entry.setModTime(getModTime(details)); - entry.setMode(UnixStat.DIR_FLAG | details.getMode()); + entry.setMode(UnixStat.DIR_FLAG | details.getPermissions().toUnixNumeric()); try { tar.putArchiveEntry(entry); tar.closeArchiveEntry(); @@ -170,7 +170,7 @@ private void visitDirectory(final FileCopyDetailsInternal details) { private void visitFile(final FileCopyDetailsInternal details) { final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString()); entry.setModTime(getModTime(details)); - entry.setMode(UnixStat.FILE_FLAG | details.getMode()); + entry.setMode(UnixStat.FILE_FLAG | details.getPermissions().toUnixNumeric()); entry.setSize(details.getSize()); try { tar.putArchiveEntry(entry); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java index 1ec6f023eb565..fd75df6c06b84 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java @@ -7,6 +7,8 @@ */ package org.elasticsearch.gradle.internal.docker; +import com.avast.gradle.dockercompose.ServiceInfo; + import org.elasticsearch.gradle.Architecture; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; @@ -56,6 +58,9 @@ public abstract class DockerSupportService implements BuildService serviceInfos; + private Map> tcpPorts; + private Map> udpPorts; @Inject public DockerSupportService(ProviderFactory providerFactory) { @@ -145,6 +150,10 @@ public DockerAvailability getDockerAvailability() { return this.dockerAvailability; } + public boolean isArchitectureSupported(Architecture architecture) { + return getDockerAvailability().supportedArchitectures().contains(architecture); + } + private DockerResult runCommand(List args, DockerValueSource.OutputFilter outputFilter) { return providerFactory.of(DockerValueSource.class, params -> { params.getParameters().getArgs().addAll(args); @@ -329,6 +338,27 @@ private void throwDockerRequiredException(final String message, Exception e) { ); } + public void storeInfo(Map servicesInfos) { + tcpPorts = servicesInfos.entrySet() + .stream() + .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue().getTcpPorts())); + udpPorts = servicesInfos.entrySet() + .stream() + .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue().getUdpPorts())); + } + + public Map> getTcpPorts() { + return tcpPorts; + } + + public Map> getUdpPorts() { + return udpPorts; + } + + public void setServiceInfos(Map serviceInfos) { + this.serviceInfos = serviceInfos; + } + /** * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. 
*/ diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 5e62790a9d78a..42834928bafed 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -260,7 +260,7 @@ private List getAvailableJavaVersions() { private Stream getAvailableJavaInstallationLocationSteam() { return Stream.concat( javaInstallationRegistry.toolchains().stream().map(metadata -> metadata.location), - Stream.of(new InstallationLocation(Jvm.current().getJavaHome(), "Current JVM")) + Stream.of(InstallationLocation.userDefined(Jvm.current().getJavaHome(), "Current JVM")) ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java index 72c08712a1fd9..b1d9cbd1f01d1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java @@ -12,30 +12,23 @@ import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; import org.gradle.api.Project; import org.gradle.api.Task; -import org.gradle.api.artifacts.Configuration; -import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.artifacts.component.ComponentIdentifier; +import org.gradle.api.artifacts.component.ModuleComponentIdentifier; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.specs.Spec; import org.gradle.api.tasks.TaskProvider; public class DependencyLicensesPrecommitPlugin extends PrecommitPlugin { + private static Spec COMPONENT_FILTER = identifier -> (identifier instanceof ModuleComponentIdentifier) + && ((ModuleComponentIdentifier) identifier).getGroup().startsWith("org.elasticsearch") == false; @Override public TaskProvider createTask(Project project) { project.getPlugins().apply(CompileOnlyResolvePlugin.class); - TaskProvider dependencyLicenses = project.getTasks() - .register("dependencyLicenses", DependencyLicensesTask.class); - - // only require dependency licenses for non-elasticsearch deps - dependencyLicenses.configure(t -> { - Configuration runtimeClasspath = project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME); - Configuration compileOnly = project.getConfigurations() - .getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME); - t.setDependencies( - runtimeClasspath.fileCollection( - dependency -> dependency instanceof ProjectDependency == false - && dependency.getGroup().startsWith("org.elasticsearch") == false - ).minus(compileOnly) - ); + var dependencyLicenses = project.getTasks().register("dependencyLicenses", DependencyLicensesTask.class, t -> { + var runtimeClasspath = project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME); + var compileOnly = project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME); + t.configureDependencies(runtimeClasspath, compileOnly, COMPONENT_FILTER); }); return dependencyLicenses; } diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java index f71973c2fb15c..0099a4616f829 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java @@ -11,6 +11,8 @@ import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import org.gradle.api.InvalidUserDataException; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.component.ComponentIdentifier; import org.gradle.api.file.Directory; import org.gradle.api.file.DirectoryProperty; import org.gradle.api.file.FileCollection; @@ -18,7 +20,9 @@ import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; +import org.gradle.api.specs.Spec; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputDirectory; import org.gradle.api.tasks.InputFiles; @@ -41,6 +45,8 @@ import javax.inject.Inject; +import static org.elasticsearch.gradle.internal.util.DependenciesUtils.createFileCollectionFromNonTransitiveArtifactsView; + /** * A task to check licenses for dependencies. *

@@ -83,7 +89,7 @@ * for the dependency. This artifact will be redistributed by us with the release to * comply with the license terms. */ -public class DependencyLicensesTask extends DefaultTask { +public abstract class DependencyLicensesTask extends DefaultTask { private final Pattern regex = Pattern.compile("-v?\\d+.*"); @@ -181,6 +187,10 @@ public void ignoreFile(String file) { ignoreFiles.add(file); } + @Input + @Optional + public abstract Property> getComponentFilter(); + @TaskAction public void checkDependencies() { if (dependencies == null) { @@ -295,7 +305,6 @@ private String getFileName(String name, Map counters, String type) { // try the other suffix...TODO: get rid of this, just support ending in .txt return fileName + ".txt"; } - return fileName; } @@ -310,4 +319,15 @@ public LinkedHashMap getMappings() { return new LinkedHashMap<>(mappings); } + /** + * Convencience method for configuring dependencies to be checked and ignoring transitive dependencies for now. + * */ + public void configureDependencies( + Configuration plusConfiguration, + Configuration minusConfiguration, + Spec componentFilter + ) { + setDependencies(createFileCollectionFromNonTransitiveArtifactsView(plusConfiguration, componentFilter).minus(minusConfiguration)); + } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java index f6d3787a4f686..1fc030be42480 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -15,11 +15,14 @@ import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.component.ModuleComponentIdentifier; import org.gradle.api.tasks.TaskProvider; import java.io.File; import java.nio.file.Path; +import static org.elasticsearch.gradle.internal.util.DependenciesUtils.createFileCollectionFromNonTransitiveArtifactsView; + public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { public static final String JDK_JAR_HELL_CONFIG_NAME = "jdkJarHell"; @@ -54,12 +57,14 @@ public TaskProvider createTask(Project project) { Configuration compileOnly = project.getConfigurations() .getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME); t.setClasspath(runtimeConfiguration.plus(compileOnly)); - t.getJarsToScan().from(runtimeConfiguration.fileCollection(dep -> { - // These are SelfResolvingDependency, and some of them backed by file collections, like the Gradle API files, - // or dependencies added as `files(...)`, we can't be sure if those are third party or not. 
- // err on the side of scanning these to make sure we don't miss anything - return dep.getGroup() != null && dep.getGroup().startsWith("org.elasticsearch") == false; - })); + t.getJarsToScan() + .from( + createFileCollectionFromNonTransitiveArtifactsView( + runtimeConfiguration, + identifier -> identifier instanceof ModuleComponentIdentifier + && ((ModuleComponentIdentifier) identifier).getGroup().startsWith("org.elasticsearch") == false + ) + ); t.dependsOn(resourcesTask); if (BuildParams.getIsRuntimeJavaHomeSet()) { t.getJavaHome().set(project.provider(BuildParams::getRuntimeJavaHome).map(File::getPath)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java index 4c5f2abb9515c..a16057220ce89 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java @@ -9,7 +9,6 @@ import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; -import com.avast.gradle.dockercompose.ServiceInfo; import com.avast.gradle.dockercompose.tasks.ComposeBuild; import com.avast.gradle.dockercompose.tasks.ComposeDown; import com.avast.gradle.dockercompose.tasks.ComposePull; @@ -106,12 +105,6 @@ public void apply(Project project) { .register("postProcessFixture", TestFixtureTask.class, task -> { task.getFixturesDir().set(testFixturesDir); task.dependsOn(buildFixture); - configureServiceInfoForTask( - task, - project, - false, - (name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) - ); }); maybeSkipTask(dockerSupport, preProcessFixture); @@ -131,12 +124,20 @@ public void apply(Project project) { return composePath != null ? 
composePath : "/usr/bin/docker-compose"; })); - tasks.named("composeUp").configure(t -> { + tasks.withType(ComposeUp.class).named("composeUp").configure(t -> { // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions if (BuildParams.isCi()) { t.usesService(dockerComposeThrottle); + t.usesService(dockerSupport); } t.mustRunAfter(preProcessFixture); + t.doLast(new Action() { + @Override + public void execute(Task task) { + dockerSupport.get().storeInfo(t.getServicesInfos()); + } + }); + }); tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); tasks.named("composeDown").configure(t -> t.doLast(t2 -> getFileSystemOperations().delete(d -> d.delete(testFixturesDir)))); @@ -153,14 +154,9 @@ public void apply(Project project) { tasks.withType(Test.class).configureEach(testTask -> { testTask.dependsOn(postProcessFixture); testTask.finalizedBy(tasks.named("composeDown")); - configureServiceInfoForTask( - testTask, - project, - true, - (name, host) -> testTask.getExtensions() - .getByType(SystemPropertyCommandLineArgumentProvider.class) - .systemProperty(name, host) - ); + SystemPropertyCommandLineArgumentProvider sysArgumentsProvider = testTask.getExtensions() + .getByType(SystemPropertyCommandLineArgumentProvider.class); + configureServiceInfoForTask(testTask, dockerSupport, (name, host) -> sysArgumentsProvider.systemProperty(name, host)); }); } @@ -184,31 +180,34 @@ private void maybeSkipTask(Provider dockerSupport, Task ta private void configureServiceInfoForTask( Task task, - Project fixtureProject, - boolean enableFilter, + Provider dockerSupportServiceProvider, BiConsumer consumer ) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst + task.usesService(dockerSupportServiceProvider); task.doFirst(new Action() { @Override public void execute(Task theTask) { - fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos().entrySet().stream().forEach(entry -> { + dockerSupportServiceProvider.get().getTcpPorts().entrySet().stream().forEach(entry -> { String service = entry.getKey(); - ServiceInfo infos = entry.getValue(); - infos.getTcpPorts().forEach((container, host) -> { - String name = "test.fixtures." + service + ".tcp." + container; - theTask.getLogger().info("port mapping property: {}={}", name, host); - consumer.accept(name, host); + entry.getValue().entrySet().stream().forEach(portMapping -> { + String name = "test.fixtures." + service + ".tcp." + portMapping.getKey(); + theTask.getLogger().info("port mapping property: {}={}", name, portMapping.getValue()); + consumer.accept(name, portMapping.getValue()); }); - infos.getUdpPorts().forEach((container, host) -> { - String name = "test.fixtures." + service + ".udp." + container; - theTask.getLogger().info("port mapping property: {}={}", name, host); - consumer.accept(name, host); + }); + dockerSupportServiceProvider.get().getUdpPorts().entrySet().stream().forEach(entry -> { + String service = entry.getKey(); + entry.getValue().entrySet().stream().forEach(portMapping -> { + String name = "test.fixtures." + service + ".udp." 
+ portMapping.getKey(); + theTask.getLogger().info("port mapping property: {}={}", name, portMapping.getValue()); + consumer.accept(name, portMapping.getValue()); }); }); } }); + } @SuppressWarnings("unchecked") diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java new file mode 100644 index 0000000000000..081c28c14fd91 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/DependenciesUtils.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.util; + +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ResolvableDependencies; +import org.gradle.api.artifacts.component.ComponentIdentifier; +import org.gradle.api.artifacts.result.ResolvedComponentResult; +import org.gradle.api.artifacts.result.ResolvedDependencyResult; +import org.gradle.api.file.FileCollection; +import org.gradle.api.specs.AndSpec; +import org.gradle.api.specs.Spec; + +import java.util.Set; +import java.util.stream.Collectors; + +public class DependenciesUtils { + + public static FileCollection createFileCollectionFromNonTransitiveArtifactsView( + Configuration configuration, + Spec<ComponentIdentifier> componentFilter + ) { + ResolvableDependencies incoming = configuration.getIncoming(); + return incoming.artifactView(viewConfiguration -> { + Set<ComponentIdentifier> firstLevelDependencyComponents = incoming.getResolutionResult() + .getRootComponent() + .map( + rootComponent -> rootComponent.getDependencies() + .stream() + .filter(dependency -> dependency instanceof ResolvedDependencyResult) + .map(dependency -> (ResolvedDependencyResult) dependency) + .filter(dependency -> dependency.getSelected() instanceof ResolvedComponentResult) + .map(dependency -> dependency.getSelected().getId()) + .collect(Collectors.toSet()) + ) + .get(); + viewConfiguration.componentFilter( + new AndSpec<>(identifier -> firstLevelDependencyComponents.contains(identifier), componentFilter) + ); + }).getFiles(); + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/HdfsUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/HdfsUtils.java deleted file mode 100644 index 8b9570d62389e..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/HdfsUtils.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ - -package org.elasticsearch.gradle.internal.util; - -import org.elasticsearch.gradle.OS; -import org.gradle.api.Project; -import org.gradle.api.logging.Logging; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class HdfsUtils { - - public static boolean isHdfsFixtureSupported(Project project) { - String projectPath = project.getProjectDir().getPath(); - if (isLegalHdfsPath(projectPath) == false) { - Logging.getLogger(HdfsUtils.class).warn("hdfs Fixture unsupported since there are spaces in the path: '" + projectPath + "'"); - return false; - } - return (OS.current() != OS.WINDOWS) ? true : isHadoopWindowsInstallationAvailable(); - } - - private static boolean isHadoopWindowsInstallationAvailable() { - // hdfs fixture will not start without hadoop native libraries on windows - String nativePath = System.getenv("HADOOP_HOME"); - if (nativePath != null) { - Path path = Paths.get(nativePath); - if (Files.isDirectory(path) - && Files.exists(path.resolve("bin").resolve("winutils.exe")) - && Files.exists(path.resolve("bin").resolve("hadoop.dll")) - && Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { - return true; - } else { - throw new IllegalStateException( - "HADOOP_HOME: " + path + " is invalid, does not contain hadoop native libraries in \\$HADOOP_HOME\\bin" - ); - } - } - Logging.getLogger(HdfsUtils.class).warn("hdfs Fixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH"); - - return false; - } - - public static boolean isLegalHdfsPath(String path) { - return path.contains(" ") == false; - - } -} diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index 631c6d36a93a4..83ea3179ddacc 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.7 \ No newline at end of file +8.8 \ No newline at end of file diff --git a/build-tools/build.gradle b/build-tools/build.gradle index eb5573ac03e0e..7ba5e9f6faa62 100644 --- a/build-tools/build.gradle +++ b/build-tools/build.gradle @@ -6,6 +6,15 @@ * Side Public License, v 1. 
*/ +buildscript { + repositories { + maven { + url 'https://jitpack.io' + } + mavenCentral() + } +} + plugins { id 'java-gradle-plugin' id 'groovy' @@ -107,6 +116,9 @@ configurations { } repositories { + maven { + url 'https://jitpack.io' + } mavenCentral() gradlePluginPortal() } diff --git a/build-tools/settings.gradle b/build-tools/settings.gradle index 63d80efcd505e..7590b8b6b054e 100644 --- a/build-tools/settings.gradle +++ b/build-tools/settings.gradle @@ -17,4 +17,4 @@ dependencyResolutionManagement { from(files("../gradle/build.versions.toml")) } } -} \ No newline at end of file +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index fb8416b24d052..2bc4aa1a1be36 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -11,11 +11,9 @@ import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; import org.elasticsearch.gradle.transform.SymbolicLinkPreservingUntarTransform; import org.elasticsearch.gradle.transform.UnzipTransform; -import org.gradle.api.Action; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; @@ -24,6 +22,7 @@ import org.gradle.api.provider.Provider; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import javax.inject.Inject; @@ -46,6 +45,7 @@ public class DistributionDownloadPlugin implements Plugin { public static final String DISTRO_EXTRACTED_CONFIG_PREFIX = "es_distro_extracted_"; public static final String DISTRO_CONFIG_PREFIX = "es_distro_file_"; + private final ObjectFactory objectFactory; private NamedDomainObjectContainer distributionsContainer; private List distributionsResolutionStrategies; @@ -53,6 +53,7 @@ public class DistributionDownloadPlugin implements Plugin { @Inject public DistributionDownloadPlugin(ObjectFactory objectFactory) { + this.objectFactory = objectFactory; this.dockerAvailability = objectFactory.property(Boolean.class).value(false); } @@ -67,36 +68,92 @@ public void apply(Project project) { transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); - ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); + var tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()); transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); setupResolutionsContainer(project); - setupDistributionContainer(project, dockerAvailability); + setupDistributionContainer(project); setupDownloadServiceRepo(project); } - private void setupDistributionContainer(Project project, Property dockerAvailable) { - + private void setupDistributionContainer(Project project) { distributionsContainer = 
project.container(ElasticsearchDistribution.class, name -> { - Configuration fileConfiguration = project.getConfigurations().create(DISTRO_CONFIG_PREFIX + name); - Configuration extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name); + var fileConfiguration = project.getConfigurations().create(DISTRO_CONFIG_PREFIX + name); + var extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name); extractedConfiguration.getAttributes() .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); - return new ElasticsearchDistribution( + + var distribution = new ElasticsearchDistribution( name, - project.getObjects(), + objectFactory, dockerAvailability, - project.getObjects().fileCollection().from(fileConfiguration), - project.getObjects().fileCollection().from(extractedConfiguration), - new FinalizeDistributionAction(distributionsResolutionStrategies, project) + objectFactory.fileCollection().from(fileConfiguration), + objectFactory.fileCollection().from(extractedConfiguration) ); + + registerDistributionDependencies(project, distribution); + return distribution; }); project.getExtensions().add(CONTAINER_NAME, distributionsContainer); } + private void registerDistributionDependencies(Project project, ElasticsearchDistribution distribution) { + project.getConfigurations() + .getByName(DISTRO_CONFIG_PREFIX + distribution.getName()) + .getDependencies() + .addLater( + project.provider(() -> distribution.maybeFreeze()) + .map( + frozenDistro -> project.getDependencies() + .create(resolveDependencyNotation(project, frozenDistro).getDefaultNotation()) + ) + ); + + project.getConfigurations() + .getByName(DISTRO_EXTRACTED_CONFIG_PREFIX + distribution.getName()) + .getDependencies() + .addAllLater( + project.provider(() -> distribution.maybeFreeze()) + .map( + frozenDistro -> distribution.getType().shouldExtract() + ? List.of( + project.getDependencies().create(resolveDependencyNotation(project, frozenDistro).getExtractedNotation()) + ) + : Collections.emptyList() + ) + ); + } + + private DistributionDependency resolveDependencyNotation(Project project, ElasticsearchDistribution distro) { + return distributionsResolutionStrategies.stream() + .map(r -> r.getResolver().resolve(project, distro)) + .filter(d -> d != null) + .findFirst() + .orElseGet(() -> DistributionDependency.of(dependencyNotation(distro))); + } + + /** + * Returns a dependency object representing the given distribution. + *

+ * The returned object is suitable to be passed to {@link DependencyHandler}. + * The concrete type of the object will be a set of maven coordinates as a {@link String}. + * Maven coordinates point to either the integ-test-zip coordinates on maven central, or a set of artificial + * coordinates that resolve to the Elastic download service through an ivy repository. + */ + private static String dependencyNotation(ElasticsearchDistribution distribution) { + if (distribution.getType() == ElasticsearchDistributionTypes.INTEG_TEST_ZIP) { + return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip"; + } + var distroVersion = Version.fromString(distribution.getVersion()); + var extension = distribution.getType().getExtension(distribution.getPlatform()); + var classifier = distribution.getType().getClassifier(distribution.getPlatform(), distroVersion); + var group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP; + return group + ":elasticsearch" + ":" + distribution.getVersion() + classifier + "@" + extension; + } + private void setupResolutionsContainer(Project project) { distributionsResolutionStrategies = new ArrayList<>(); project.getExtensions().add(RESOLUTION_CONTAINER_NAME, distributionsResolutionStrategies); @@ -133,53 +190,4 @@ private static void setupDownloadServiceRepo(Project project) { addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://snapshots-no-kpi.elastic.co", FAKE_SNAPSHOT_IVY_GROUP); } - private record FinalizeDistributionAction(List resolutionList, Project project) - implements - Action { - @Override - - public void execute(ElasticsearchDistribution distro) { - finalizeDistributionDependencies(project, distro); - } - - private void finalizeDistributionDependencies(Project project, ElasticsearchDistribution distribution) { - // for the distribution as a file, just depend on the artifact directly - DistributionDependency distributionDependency = resolveDependencyNotation(project, distribution); - project.getDependencies().add(DISTRO_CONFIG_PREFIX + distribution.getName(), distributionDependency.getDefaultNotation()); - // no extraction needed for rpm, deb or docker - if (distribution.getType().shouldExtract()) { - // The extracted configuration depends on the artifact directly but has - // an artifact transform registered to resolve it as an unpacked folder. - project.getDependencies() - .add(DISTRO_EXTRACTED_CONFIG_PREFIX + distribution.getName(), distributionDependency.getExtractedNotation()); - } - } - - private DistributionDependency resolveDependencyNotation(Project project, ElasticsearchDistribution distro) { - return resolutionList.stream() - .map(r -> r.getResolver().resolve(project, distro)) - .filter(d -> d != null) - .findFirst() - .orElseGet(() -> DistributionDependency.of(dependencyNotation(distro))); - } - - /** - * Returns a dependency object representing the given distribution. - *

- * The returned object is suitable to be passed to {@link DependencyHandler}. - * The concrete type of the object will be a set of maven coordinates as a {@link String}. - * Maven coordinates point to either the integ-test-zip coordinates on maven central, or a set of artificial - * coordinates that resolve to the Elastic download service through an ivy repository. - */ - private String dependencyNotation(ElasticsearchDistribution distribution) { - if (distribution.getType() == ElasticsearchDistributionTypes.INTEG_TEST_ZIP) { - return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip"; - } - Version distroVersion = Version.fromString(distribution.getVersion()); - String extension = distribution.getType().getExtension(distribution.getPlatform()); - String classifier = distribution.getType().getClassifier(distribution.getPlatform(), distroVersion); - String group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP; - return group + ":elasticsearch" + ":" + distribution.getVersion() + classifier + "@" + extension; - } - } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java index fab6926008d6c..afb90ba1ca62e 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java @@ -9,7 +9,6 @@ package org.elasticsearch.gradle; import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; -import org.gradle.api.Action; import org.gradle.api.Buildable; import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; @@ -55,7 +54,6 @@ public String toString() { private final Property failIfUnavailable; private final Property preferArchive; private final ConfigurableFileCollection extracted; - private transient Action distributionFinalizer; private boolean frozen = false; ElasticsearchDistribution( @@ -63,8 +61,7 @@ public String toString() { ObjectFactory objectFactory, Property dockerAvailability, ConfigurableFileCollection fileConfiguration, - ConfigurableFileCollection extractedConfiguration, - Action distributionFinalizer + ConfigurableFileCollection extractedConfiguration ) { this.name = name; this.dockerAvailability = dockerAvailability; @@ -78,7 +75,6 @@ public String toString() { this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true); this.preferArchive = objectFactory.property(Boolean.class).convention(false); this.extracted = extractedConfiguration; - this.distributionFinalizer = distributionFinalizer; } public String getName() { @@ -172,7 +168,6 @@ public String toString() { public ElasticsearchDistribution maybeFreeze() { if (frozen == false) { finalizeValues(); - distributionFinalizer.execute(this); frozen = true; } return this; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java index 4fda91d332118..6087482db278d 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java @@ -65,6 +65,9 @@ public abstract class LoggedExec extends DefaultTask implements FileSystemOperat @Optional abstract public MapProperty getEnvironment(); + @Internal + abstract public MapProperty getNonTrackedEnvironment(); + 
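The split between the two maps is the point of this change: `getEnvironment()` stays an `@Input`, so its values feed the up-to-date check and build-cache key, while the `@Internal` `getNonTrackedEnvironment()` carries machine-specific values that must not invalidate cached results; the `run()` change later in this hunk switches from `setEnvironment(...)` to two `environment(...)` calls, which append both maps to the forked process environment instead of replacing it. A minimal sketch of the intended usage (hypothetical, not part of this change; the values are illustrative assumptions):

// Inside a Plugin<Project>#apply(Project project) method:
project.getTasks().register("runTool", LoggedExec.class, task -> {
    task.getExecutable().set("docker");
    task.getArgs().set(java.util.List.of("version"));
    // Tracked: changing this value makes the task out of date.
    task.getEnvironment().put("TOOL_OPTS", "--strict");
    // Not tracked: varies per machine, so it is kept out of the cache key.
    task.getNonTrackedEnvironment().put("HOME", System.getProperty("user.home"));
});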
@Input abstract public Property getExecutable(); @@ -139,7 +142,8 @@ public void run() { execSpec.setStandardOutput(finalOutputStream); execSpec.setErrorOutput(finalOutputStream); execSpec.setExecutable(getExecutable().get()); - execSpec.setEnvironment(getEnvironment().get()); + execSpec.environment(getEnvironment().get()); + execSpec.environment(getNonTrackedEnvironment().get()); if (getArgs().isPresent()) { execSpec.setArgs(getArgs().get()); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 999f27a646b1f..d25798ad071bd 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -246,14 +246,12 @@ public void setVersions(List versions) { private void doSetVersion(String version) { String distroName = "testclusters" + path.replace(":", "-") + "-" + this.name + "-" + version; NamedDomainObjectContainer container = DistributionDownloadPlugin.getContainer(project); - if (container.findByName(distroName) == null) { - container.create(distroName); - } - ElasticsearchDistribution distro = container.getByName(distroName); - distro.setVersion(version); - distro.setArchitecture(Architecture.current()); - setDistributionType(distro, testDistribution); - distributions.add(distro); + // TODO Refactor test using register<> for reducing overhead + ElasticsearchDistribution distribution = container.maybeCreate(distroName); + distribution.setVersion(version); + distribution.setArchitecture(Architecture.current()); + setDistributionType(distribution, testDistribution); + distributions.add(distribution); } @Internal diff --git a/build.gradle b/build.gradle index 1d9757f32543d..3869d21b49bfe 100644 --- a/build.gradle +++ b/build.gradle @@ -25,6 +25,16 @@ import java.nio.file.Files import static java.nio.file.StandardCopyOption.REPLACE_EXISTING import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure +buildscript { + repositories { + maven { + url 'https://jitpack.io' + } + + mavenCentral() + } +} + plugins { id 'lifecycle-base' id 'elasticsearch.docker-support' @@ -325,7 +335,7 @@ allprojects { integTestTask.mustRunAfter tasks.matching { it.name.equals("test") } } - configurations.matching { it.canBeResolved }.all { Configuration configuration -> +/* configurations.matching { it.canBeResolved }.all { Configuration configuration -> dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep -> Project upstreamProject = dep.dependencyProject if (project.path != upstreamProject?.path) { @@ -336,7 +346,7 @@ allprojects { } } } - } + }*/ } apply plugin: 'elasticsearch.formatting' diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 4d7850477dbf5..815ac5d4c2dd8 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -18,11 +18,17 @@ CopySpec archiveFiles(String distributionType, String os, String architecture, b with libFiles(os, architecture) } into('config') { - dirMode 0750 - fileMode 0660 + dirPermissions { + unix 0750 + } + filePermissions { + unix 0660 + } with configFiles(distributionType, isTestDistro) from { - dirMode 0750 + dirPermissions { + unix 0750 + } jvmOptionsDir.getParent() } } @@ -36,21 +42,31 @@ CopySpec archiveFiles(String distributionType, String os, String architecture, b } into('') { from { - dirMode 
0755 + dirPermissions { + unix 0755 + } logsDir.getParent() } } into('') { from { - dirMode 0755 + dirPermissions { + unix 0755 + } pluginsDir.getParent() } } from(rootProject.projectDir) { + filePermissions { + unix(0644) + } include 'README.asciidoc' } from(rootProject.file('licenses')) { include isTestDistro ? 'SSPL-1.0+ELASTIC-LICENSE-2.0.txt' : 'ELASTIC-LICENSE-2.0.txt' + filePermissions { + unix(0644) + } rename { 'LICENSE.txt' } } diff --git a/distribution/build.gradle b/distribution/build.gradle index c3f9192ecee05..77f1a2d032c73 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -346,9 +346,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { if (it.relativePath.segments[-2] == 'bin' || (os == 'darwin' && it.relativePath.segments[-2] == 'MacOS')) { // bin files, wherever they are within modules (eg platform specific) should be executable // and MacOS is an alternative to bin on macOS - it.mode = 0755 + it.permissions.unix(0755) } else { - it.mode = 0644 + it.permissions.unix(0644) } } List excludePlatforms = ['linux-x86_64', 'linux-aarch64', 'windows-x86_64', 'darwin-x86_64', 'darwin-aarch64'] @@ -404,7 +404,11 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from '../src/bin' exclude '*.exe' exclude '*.bat' - eachFile { it.setMode(0755) } + eachFile { + it.permissions{ + unix(0755) + } + } filter("tokens" : expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) } // windows files, only for zip @@ -422,7 +426,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } // module provided bin files with copySpec { - eachFile { it.setMode(0755) } + eachFile { it.permissions.unix(0755) } from(testDistro ? integTestBinFiles : defaultBinFiles) if (distributionType != 'zip') { exclude '*.bat' @@ -437,7 +441,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from buildServerNoticeTaskProvider } else { from (buildDefaultNoticeTaskProvider) { - fileMode = 0644 + filePermissions { + unix(0644) + } } } } @@ -456,7 +462,13 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } eachFile { FileCopyDetails details -> if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { - details.mode = 0755 + details.permissions { + unix(0755) + } + } else { + details.permissions { + unix(0644) + } } if (details.name == 'src.zip') { details.exclude() diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 68ff2028b92a3..85e66ccba34b1 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.internal.docker.ShellRetry import org.elasticsearch.gradle.internal.docker.TransformLog4jConfigFilter import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.util.GradleUtils - import java.nio.file.Path import java.time.temporal.ChronoUnit @@ -22,6 +21,8 @@ apply plugin: 'elasticsearch.dra-artifacts' String buildId = providers.systemProperty('build.id').getOrNull() boolean useLocalArtifacts = buildId != null && buildId.isBlank() == false && useDra == false + + repositories { // Define a repository that allows Gradle to fetch a resource from GitHub. This // is only used to fetch the `tini` binary, when building the Iron Bank docker image @@ -120,7 +121,7 @@ ext.expansions = { Architecture architecture, DockerBase base -> // the image. 
When developing the Docker images, it's very tedious to completely rebuild // an image for every single change. Therefore, outside of CI, we fix the // build time to midnight so that the Docker build cache is usable. - def buildDate = BuildParams.isCi() ? BuildParams.buildDate : BuildParams.buildDate.truncatedTo(ChronoUnit.DAYS) + def buildDate = BuildParams.isCi() ? BuildParams.buildDate : BuildParams.buildDate.truncatedTo(ChronoUnit.DAYS).toString() return [ 'arch' : architecture.classifier, @@ -188,26 +189,29 @@ ext.dockerBuildContext = { Architecture architecture, DockerBase base -> } } } - -def createAndSetWritable(Object... locations) { - locations.each { location -> - File file = file(location) - file.mkdirs() - file.setWritable(true, false) - } -} +// +//def createAndSetWritable(Object... locations) { +// locations.each { location -> +// File file = file(location) +// file.mkdirs() +// file.setWritable(true, false) +// } +//} tasks.register("copyNodeKeyMaterial", Sync) { + def certsDir = file("build/certs") + def pemFile = file("build/certs/testnode.pem") + def crtFile = file("build/certs/testnode.crt") from project(':x-pack:plugin:core') .files( 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem', 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt' ) - into "${buildDir}/certs" + into "build/certs" doLast { - file("${buildDir}/certs").setReadable(true, false) - file("${buildDir}/certs/testnode.pem").setReadable(true, false) - file("${buildDir}/certs/testnode.crt").setReadable(true, false) + certsDir.setReadable(true, false) + pemFile.setReadable(true, false) + crtFile.setReadable(true, false) } } @@ -222,19 +226,27 @@ elasticsearch_distributions { } } +interface Injected { + @Inject FileSystemOperations getFs() +} + tasks.named("preProcessFixture").configure { dependsOn elasticsearch_distributions.matching { it.architecture == Architecture.current() } dependsOn "copyNodeKeyMaterial" + def injected = project.objects.newInstance(Injected) + def testFixturesFolder = project.testFixturesDir.absoluteFile doLast { // tests expect to have an empty repo - project.delete( - "${testFixturesDir}/repo", - ) - createAndSetWritable( - "${testFixturesDir}/repo", - "${testFixturesDir}/logs/default-1", - "${testFixturesDir}/logs/default-2", - ) + injected.fs.delete { + it.delete("${testFixturesFolder}/repo") + } + ["${testFixturesFolder}/repo", + "${testFixturesFolder}/logs/default-1", + "${testFixturesFolder}/logs/default-2"].each { location -> + File file = new File(location) + file.mkdirs() + file.setWritable(true, false) + } } } @@ -299,8 +311,8 @@ void addBuildDockerContextTask(Architecture architecture, DockerBase base) { // For some reason, the artifact name can differ depending on what repository we used. 
rename ~/((?:file|metric)beat)-.*\.tar\.gz$/, "\$1-${VersionProperties.elasticsearch}.tar.gz" } - - onlyIf("$architecture supported") { isArchitectureSupported(architecture) } + Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } if (base == DockerBase.IRON_BANK) { @@ -348,8 +360,8 @@ void addTransformDockerContextTask(Architecture architecture, DockerBase base) { expansions(architecture, base).findAll { it.key != 'build_date' }.each { k, v -> inputs.property(k, { v.toString() }) } - - onlyIf("$architecture supported") { isArchitectureSupported(architecture) } + Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } // Register transformed context as a project artifact @@ -386,6 +398,7 @@ private static List generateTags(DockerBase base, Architecture architect } void addBuildDockerImageTask(Architecture architecture, DockerBase base) { + final TaskProvider buildDockerImageTask = tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) { @@ -421,7 +434,9 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { baseImages = [base.image] } - onlyIf("$architecture supported") { isArchitectureSupported(architecture) } + Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } + } if (base != DockerBase.IRON_BANK && base != DockerBase.CLOUD && base != DockerBase.CLOUD_ESS) { @@ -467,8 +482,9 @@ void addBuildEssDockerImageTask(Architecture architecture) { baseImages = [] tags = generateTags(base, architecture) platforms.add(architecture.dockerPlatform) + Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } - onlyIf("$architecture supported") { isArchitectureSupported(architecture) } } tasks.named("assemble").configure { @@ -489,11 +505,6 @@ for (final Architecture architecture : Architecture.values()) { addBuildEssDockerImageTask(architecture) } -boolean isArchitectureSupported(Architecture architecture) { - Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) - return serviceProvider.get().dockerAvailability.supportedArchitectures().contains(architecture) -} - def exportDockerImages = tasks.register("exportDockerImages") def exportCompressedDockerImages = tasks.register("exportCompressedDockerImages") @@ -538,7 +549,8 @@ subprojects { Project subProject -> tarFile, "elasticsearch${base.suffix}:${architecture.classifier}" dependsOn(parent.path + ":" + buildTaskName) - onlyIf("$architecture supported") { isArchitectureSupported(architecture) } + Provider serviceProvider = GradleUtils.getBuildService(project.gradle.sharedServices, DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME) + onlyIf("$architecture supported") { serviceProvider.get().isArchitectureSupported(architecture) } } exportDockerImages.configure { diff --git a/distribution/packages/build.gradle 
b/distribution/packages/build.gradle index 6b57f32310c93..6c31bc44017c3 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -42,20 +42,10 @@ import java.util.regex.Pattern * dpkg -c path/to/elasticsearch.deb */ -buildscript { - repositories { - maven { - url 'https://jitpack.io' - } - mavenCentral() - } - dependencies { - classpath "com.github.breskeby:gradle-ospackage-plugin:2da19425133" - } +plugins { + id "com.netflix.nebula.ospackage-base" version "11.9.1" } -apply plugin: "com.netflix.nebula.ospackage-base" - ['deb', 'rpm'].each { type -> String packagingFiles = "build/packaging/${type}" @@ -138,7 +128,9 @@ def commonPackageConfig(String type, String architecture) { } from(rootProject.projectDir) { include 'README.asciidoc' - fileMode 0644 + filePermissions { + unix 0644 + } } into('lib') { with libFiles('linux', architecture) @@ -159,9 +151,13 @@ def commonPackageConfig(String type, String architecture) { directory('/' + segments[0..i].join('/'), 0755) } if (segments[-2] == 'bin' || segments[-1] == 'jspawnhelper') { - fcp.mode = 0755 + fcp.permissions { + unix(0755) + } } else { - fcp.mode = 0644 + fcp.permissions { + unix(0644) + } } } } @@ -171,7 +167,9 @@ def commonPackageConfig(String type, String architecture) { if (type == 'deb') { into("/usr/share/doc/${packageName}") { from "${packagingFiles}/copyright" - fileMode 0644 + filePermissions { + unix(0644) + } } } else { assert type == 'rpm' @@ -180,7 +178,9 @@ def commonPackageConfig(String type, String architecture) { include 'ELASTIC-LICENSE-2.0.txt' rename { 'LICENSE.txt' } } - fileMode 0644 + filePermissions { + unix(0644) + } } } @@ -194,7 +194,9 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users' configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { - dirMode 02750 + dirPermissions { + unix(02750) + } into('/etc') permissionGroup 'elasticsearch' setgid true @@ -205,9 +207,13 @@ def commonPackageConfig(String type, String architecture) { } from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') - dirMode 02750 + dirPermissions { + unix(02750) + } setgid = true - fileMode 0660 + filePermissions { + unix(0660) + } permissionGroup 'elasticsearch' includeEmptyDirs true createDirectoryEntry true @@ -218,28 +224,38 @@ def commonPackageConfig(String type, String architecture) { into(new File(envFile).getParent()) { fileType CONFIG | NOREPLACE permissionGroup 'elasticsearch' - fileMode 0660 + filePermissions { + unix(0660) + } from "${packagingFiles}/env/elasticsearch" } // ========= systemd ========= into('/usr/lib/tmpfiles.d') { from "${packagingFiles}/systemd/elasticsearch.conf" - fileMode 0644 + filePermissions { + unix(0644) + } } into('/usr/lib/systemd/system') { fileType CONFIG | NOREPLACE from "${packagingFiles}/systemd/elasticsearch.service" - fileMode 0644 + filePermissions { + unix(0644) + } } into('/usr/lib/sysctl.d') { fileType CONFIG | NOREPLACE from "${packagingFiles}/systemd/sysctl/elasticsearch.conf" - fileMode 0644 + filePermissions { + unix(0644) + } } into('/usr/share/elasticsearch/bin') { from "${packagingFiles}/systemd/systemd-entrypoint" - fileMode 0755 + filePermissions { + unix(0755) + } } // ========= empty dirs ========= @@ -253,7 +269,9 @@ def commonPackageConfig(String type, String architecture) { createDirectoryEntry true user u permissionGroup g - dirMode = mode + dirPermissions { + unix(mode) + } setgid (mode == 02750) } } @@ -322,7 +340,9 @@ Closure 
commonDebConfig(String architecture) { into('/usr/share/lintian/overrides') { from('src/deb/lintian/elasticsearch') - fileMode 0644 + filePermissions { + unix(0644) + } } } } diff --git a/docs/changelog/103542.yaml b/docs/changelog/103542.yaml deleted file mode 100644 index 74e713eb2f606..0000000000000 --- a/docs/changelog/103542.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 103542 -summary: Flatten object mappings when subobjects is false -area: Mapping -type: feature -issues: - - 99860 - - 103497 diff --git a/docs/changelog/104711.yaml b/docs/changelog/104711.yaml deleted file mode 100644 index f0f9bf7f10e45..0000000000000 --- a/docs/changelog/104711.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104711 -summary: "Fixing NPE when requesting [_none_] for `stored_fields`" -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104830.yaml b/docs/changelog/104830.yaml deleted file mode 100644 index c056f3d618b75..0000000000000 --- a/docs/changelog/104830.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104830 -summary: All new `shard_seed` parameter for `random_sampler` agg -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/104907.yaml b/docs/changelog/104907.yaml deleted file mode 100644 index 0d8592ae29526..0000000000000 --- a/docs/changelog/104907.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104907 -summary: Support ST_INTERSECTS between geometry column and other geometry or string -area: "ES|QL" -type: enhancement -issues: -- 104874 diff --git a/docs/changelog/105063.yaml b/docs/changelog/105063.yaml deleted file mode 100644 index 668f8ac104493..0000000000000 --- a/docs/changelog/105063.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105063 -summary: Infrastructure for metering the update requests -area: Infra/Metrics -type: enhancement -issues: [] diff --git a/docs/changelog/105067.yaml b/docs/changelog/105067.yaml deleted file mode 100644 index 562e8271f5502..0000000000000 --- a/docs/changelog/105067.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105067 -summary: "ESQL: Use faster field caps" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105168.yaml b/docs/changelog/105168.yaml deleted file mode 100644 index 0f3792b832f55..0000000000000 --- a/docs/changelog/105168.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105168 -summary: Add ?master_timeout query parameter to ccr apis -area: CCR -type: bug -issues: [] diff --git a/docs/changelog/105360.yaml b/docs/changelog/105360.yaml deleted file mode 100644 index 41a7ea24e5500..0000000000000 --- a/docs/changelog/105360.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105360 -summary: Cross-cluster painless/execute actions should check permissions only on target - remote cluster -area: Search -type: bug -issues: [] diff --git a/docs/changelog/105393.yaml b/docs/changelog/105393.yaml deleted file mode 100644 index 4a4cc299b7bd7..0000000000000 --- a/docs/changelog/105393.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105393 -summary: Adding support for hex-encoded byte vectors on knn-search -area: Vector Search -type: feature -issues: [] diff --git a/docs/changelog/105421.yaml b/docs/changelog/105421.yaml deleted file mode 100644 index 2ff9ef008c803..0000000000000 --- a/docs/changelog/105421.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105421 -summary: "ESQL: Add timers to many status results" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105439.yaml b/docs/changelog/105439.yaml deleted file mode 100644 index 45bbede469542..0000000000000 --- a/docs/changelog/105439.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105439 -summary: 
Support Profile Activate with JWTs with client authn -area: Authentication -type: enhancement -issues: - - 105342 diff --git a/docs/changelog/105449.yaml b/docs/changelog/105449.yaml deleted file mode 100644 index b565d6c782bd9..0000000000000 --- a/docs/changelog/105449.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105449 -summary: Don't stop checking if the `HealthNode` persistent task is present -area: Health -type: bug -issues: - - 98926 diff --git a/docs/changelog/105454.yaml b/docs/changelog/105454.yaml deleted file mode 100644 index fc814a343c46b..0000000000000 --- a/docs/changelog/105454.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105454 -summary: "ESQL: Sum of constants" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105470.yaml b/docs/changelog/105470.yaml deleted file mode 100644 index 56425de6c88e4..0000000000000 --- a/docs/changelog/105470.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105470 -summary: Add retrievers using the parser-only approach -area: Ranking -type: enhancement -issues: [] diff --git a/docs/changelog/105477.yaml b/docs/changelog/105477.yaml deleted file mode 100644 index f994d38a3f671..0000000000000 --- a/docs/changelog/105477.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105477 -summary: "ESQL: Introduce expression validation phase" -area: ES|QL -type: enhancement -issues: - - 105425 diff --git a/docs/changelog/105501.yaml b/docs/changelog/105501.yaml deleted file mode 100644 index 2e5e375764640..0000000000000 --- a/docs/changelog/105501.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105501 -summary: Support non-keyword dimensions as routing fields in TSDB -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/105517.yaml b/docs/changelog/105517.yaml deleted file mode 100644 index 7cca86d1cff6e..0000000000000 --- a/docs/changelog/105517.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105517 -summary: Upgrade to Netty 4.1.107 -area: Network -type: upgrade -issues: [] diff --git a/docs/changelog/105617.yaml b/docs/changelog/105617.yaml deleted file mode 100644 index 7fd8203336fff..0000000000000 --- a/docs/changelog/105617.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105617 -summary: Fix HTTP corner-case response leaks -area: Network -type: bug -issues: [] diff --git a/docs/changelog/105622.yaml b/docs/changelog/105622.yaml deleted file mode 100644 index 33093f5ffceb5..0000000000000 --- a/docs/changelog/105622.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105622 -summary: Distinguish different snapshot failures by log level -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/105629.yaml b/docs/changelog/105629.yaml deleted file mode 100644 index 00fa73a759558..0000000000000 --- a/docs/changelog/105629.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105629 -summary: Show owner `realm_type` for returned API keys -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/105636.yaml b/docs/changelog/105636.yaml deleted file mode 100644 index 01f27199771d4..0000000000000 --- a/docs/changelog/105636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105636 -summary: Flip dynamic mapping condition when create tsid -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/105660.yaml b/docs/changelog/105660.yaml deleted file mode 100644 index 1b30a25417906..0000000000000 --- a/docs/changelog/105660.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105660 -summary: "Text structure endpoints to determine the structure of a list of messages and of an indexed field" -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/105670.yaml 
b/docs/changelog/105670.yaml deleted file mode 100644 index 234f4b6af5a73..0000000000000 --- a/docs/changelog/105670.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105670 -summary: "Painless: Apply true regex limit factor with FIND and MATCH operation" -area: Infra/Scripting -type: bug -issues: [] diff --git a/docs/changelog/105674.yaml b/docs/changelog/105674.yaml deleted file mode 100644 index 7b8d04f4687a3..0000000000000 --- a/docs/changelog/105674.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105674 -summary: Health monitor concurrency fixes -area: Health -type: bug -issues: - - 105065 diff --git a/docs/changelog/105689.yaml b/docs/changelog/105689.yaml deleted file mode 100644 index e76281f1b2fc7..0000000000000 --- a/docs/changelog/105689.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105689 -summary: Fix `uri_parts` processor behaviour for missing extensions -area: Ingest Node -type: bug -issues: - - 105612 diff --git a/docs/changelog/105693.yaml b/docs/changelog/105693.yaml deleted file mode 100644 index 8d14d611e19a3..0000000000000 --- a/docs/changelog/105693.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105693 -summary: Fix error 500 on invalid `ParentIdQuery` -area: Search -type: bug -issues: - - 105366 diff --git a/docs/changelog/105709.yaml b/docs/changelog/105709.yaml deleted file mode 100644 index 41b6e749d9270..0000000000000 --- a/docs/changelog/105709.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105709 -summary: Apply stricter Document Level Security (DLS) rules for the validate query API with the rewrite parameter. -area: Security -type: bug -issues: [] diff --git a/docs/changelog/105714.yaml b/docs/changelog/105714.yaml deleted file mode 100644 index b6ab5e128c72c..0000000000000 --- a/docs/changelog/105714.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105714 -summary: Apply stricter Document Level Security (DLS) rules for terms aggregations when min_doc_count is set to 0. 
-area: "Aggregations" -type: bug -issues: [] diff --git a/docs/changelog/105717.yaml b/docs/changelog/105717.yaml deleted file mode 100644 index c75bc4fe65798..0000000000000 --- a/docs/changelog/105717.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105717 -summary: Upgrade jna to 5.12.1 -area: Infra/Core -type: upgrade -issues: [] diff --git a/docs/changelog/105745.yaml b/docs/changelog/105745.yaml deleted file mode 100644 index e9a61f692d94d..0000000000000 --- a/docs/changelog/105745.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105745 -summary: Fix `noop_update_total` is not being updated when using the `_bulk` -area: CRUD -type: bug -issues: - - 105742 diff --git a/docs/changelog/105757.yaml b/docs/changelog/105757.yaml deleted file mode 100644 index f11aed2b2d96b..0000000000000 --- a/docs/changelog/105757.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105757 -summary: Add pluggable `BuildVersion` in `NodeMetadata` -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/105768.yaml b/docs/changelog/105768.yaml deleted file mode 100644 index 49d7f1f15c453..0000000000000 --- a/docs/changelog/105768.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105768 -summary: Add two new OGC functions ST_X and ST_Y -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/105779.yaml b/docs/changelog/105779.yaml deleted file mode 100644 index 3699ca0e2f246..0000000000000 --- a/docs/changelog/105779.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105779 -summary: "[Profiling] Speed up serialization of flamegraph" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105781.yaml b/docs/changelog/105781.yaml deleted file mode 100644 index c3ae7f0035904..0000000000000 --- a/docs/changelog/105781.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105781 -summary: CCS with `minimize_roundtrips` performs incremental merges of each `SearchResponse` -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105791.yaml b/docs/changelog/105791.yaml deleted file mode 100644 index f18b5e6b8fdd7..0000000000000 --- a/docs/changelog/105791.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105791 -summary: "Bugfix: Disable eager loading `BitSetFilterCache` on Indexing Nodes" -area: Search -type: bug -issues: [] diff --git a/docs/changelog/105797.yaml b/docs/changelog/105797.yaml deleted file mode 100644 index 7c832e2e5e63c..0000000000000 --- a/docs/changelog/105797.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105797 -summary: Enable retrying on 500 error response from Cohere text embedding API -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/105847.yaml b/docs/changelog/105847.yaml deleted file mode 100644 index a731395bc9a81..0000000000000 --- a/docs/changelog/105847.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105847 -summary: (API+) CAT Nodes alias for shard header to match CAT Allocation -area: Stats -type: enhancement -issues: [] diff --git a/docs/changelog/105860.yaml b/docs/changelog/105860.yaml deleted file mode 100644 index 71f3544a02a1f..0000000000000 --- a/docs/changelog/105860.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105860 -summary: "ESQL: Re-enable logical dependency check" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/105893.yaml b/docs/changelog/105893.yaml deleted file mode 100644 index c88736f5dda3d..0000000000000 --- a/docs/changelog/105893.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105893 -summary: Specialize serialization for `ArrayVectors` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105894.yaml 
b/docs/changelog/105894.yaml deleted file mode 100644 index a1a99eaa6259b..0000000000000 --- a/docs/changelog/105894.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105894 -summary: Add allocation stats -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/105985.yaml b/docs/changelog/105985.yaml deleted file mode 100644 index 2f2a8c1394070..0000000000000 --- a/docs/changelog/105985.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105985 -summary: Wait forever for `IndexTemplateRegistry` asset installation -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/106031.yaml b/docs/changelog/106031.yaml deleted file mode 100644 index d0a0303e74164..0000000000000 --- a/docs/changelog/106031.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 106031 -summary: Deprecate allowing `fields` in scenarios where it is ignored -area: Mapping -type: deprecation -issues: [] -deprecation: - title: Deprecate allowing `fields` in scenarios where it is ignored - area: Mapping - details: The following mapped types have always ignored `fields` when using multi-fields. - This deprecation makes this clearer and we will completely disallow `fields` for - these mapped types in the future. - impact: "In the future, `join`, `aggregate_metric_double`, and `constant_keyword`,\ - \ will all disallow supplying `fields` as a parameter in the mapping." diff --git a/docs/changelog/106036.yaml b/docs/changelog/106036.yaml deleted file mode 100644 index 7b129c6c0a7a3..0000000000000 --- a/docs/changelog/106036.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106036 -summary: Add status for enrich operator -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106053.yaml b/docs/changelog/106053.yaml deleted file mode 100644 index 72cfe0207795d..0000000000000 --- a/docs/changelog/106053.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106053 -summary: Speed up serialization of `BytesRefArray` -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106063.yaml b/docs/changelog/106063.yaml deleted file mode 100644 index 57c05370a943f..0000000000000 --- a/docs/changelog/106063.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106063 -summary: Consider `ShardRouting` roles when calculating shard copies in shutdown status -area: Infra/Node Lifecycle -type: bug -issues: [] diff --git a/docs/changelog/106065.yaml b/docs/changelog/106065.yaml deleted file mode 100644 index b87f4848fb574..0000000000000 --- a/docs/changelog/106065.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106065 -summary: "ESQL: Values aggregation function" -area: ES|QL -type: feature -issues: - - 103600 diff --git a/docs/changelog/106068.yaml b/docs/changelog/106068.yaml deleted file mode 100644 index 51bcc2bcf98b0..0000000000000 --- a/docs/changelog/106068.yaml +++ /dev/null @@ -1,21 +0,0 @@ -pr: 106068 -summary: Add `modelId` and `modelText` to `KnnVectorQueryBuilder` -area: Search -type: enhancement -issues: [] -highlight: - title: Query phase KNN now supports query_vector_builder - body: |- - It is now possible to pass `model_text` and `model_id` within a `knn` query - in the [query DSL](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-knn-query.html) to convert a text query into a dense vector and run the - nearest neighbor query on it, instead of requiring the dense vector to be - directly passed (within the `query_vector` parameter). 
Similar to the - [top-level knn query](https://www.elastic.co/guide/en/elasticsearch/reference/current/knn-search.html) (executed in the DFS phase), it is possible to supply - a `query_vector_builder` object containing a `text_embedding` object with - `model_text` (the text query to be converted into a dense vector) and - `model_id` (the identifier of a deployed model responsible for transforming - the text query into a dense vector). Note that an embedding model with the - referenced `model_id` needs to be [deployed on an ML node](https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-deploy-models.html) - in the cluster. - notable: true - diff --git a/docs/changelog/106094.yaml b/docs/changelog/106094.yaml deleted file mode 100644 index 4341164222338..0000000000000 --- a/docs/changelog/106094.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106094 -summary: "ESQL: Support partially folding CASE" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106102.yaml b/docs/changelog/106102.yaml deleted file mode 100644 index b7c13514f6715..0000000000000 --- a/docs/changelog/106102.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106102 -summary: Specialize serialization of array blocks -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106133.yaml b/docs/changelog/106133.yaml deleted file mode 100644 index 6dd7bf6cea086..0000000000000 --- a/docs/changelog/106133.yaml +++ /dev/null @@ -1,19 +0,0 @@ -pr: 106133 -summary: Add a SIMD (Neon) optimised vector distance function for int8 -area: Search -type: enhancement -issues: [] -highlight: - title: A SIMD (Neon) optimised vector distance function for merging int8 Scalar Quantized vectors has been added - body: |- - An optimised int8 vector distance implementation for aarch64 has been added. - This implementation is currently only used during merging. - The vector distance implementation outperforms Lucene's Panama Vector - implementation for binary comparisons by approx 5x (depending on the number - of dimensions). It does so by means of SIMD (Neon) intrinsics compiled into a - separate native library and linked by Panama's FFI. Comparisons are performed on - off-heap mmap'ed vector data. - Macro benchmarks, SO_Dense_Vector with scalar quantization enabled, show - significant improvements in merge times, approximately 3 times faster.
- notable: true - diff --git a/docs/changelog/106150.yaml b/docs/changelog/106150.yaml deleted file mode 100644 index 05bd8b06987c6..0000000000000 --- a/docs/changelog/106150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106150 -summary: Use correct system index bulk executor -area: CRUD -type: bug -issues: [] diff --git a/docs/changelog/106171.yaml b/docs/changelog/106171.yaml deleted file mode 100644 index 9daf1b9acd994..0000000000000 --- a/docs/changelog/106171.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106171 -summary: Do not log an error on node restart when the transform has already failed -area: Transform -type: enhancement -issues: - - 106168 diff --git a/docs/changelog/106172.yaml b/docs/changelog/106172.yaml deleted file mode 100644 index 80d80b9d7f299..0000000000000 --- a/docs/changelog/106172.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106172 -summary: "[Profiling] Allow overriding index settings" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/106186.yaml b/docs/changelog/106186.yaml deleted file mode 100644 index 097639dd28f1b..0000000000000 --- a/docs/changelog/106186.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106186 -summary: Expand support for ENRICH to the full set supported by ES ingest processors -area: ES|QL -type: enhancement -issues: - - 106162 diff --git a/docs/changelog/106189.yaml b/docs/changelog/106189.yaml deleted file mode 100644 index ec485f0e60efb..0000000000000 --- a/docs/changelog/106189.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106189 -summary: Fix numeric sorts in `_cat/nodes` -area: CAT APIs -type: bug -issues: - - 48070 diff --git a/docs/changelog/106243.yaml b/docs/changelog/106243.yaml deleted file mode 100644 index 6b02e3f1699d4..0000000000000 --- a/docs/changelog/106243.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106243 -summary: "[Transform] Auto retry Transform start" -area: "Transform" -type: bug -issues: [] diff --git a/docs/changelog/106244.yaml b/docs/changelog/106244.yaml deleted file mode 100644 index fe03f575b9efb..0000000000000 --- a/docs/changelog/106244.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106244 -summary: Support ES|QL requests through `NodeClient::execute` -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/106259.yaml b/docs/changelog/106259.yaml deleted file mode 100644 index d56b5e5a5e379..0000000000000 --- a/docs/changelog/106259.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106259 -summary: Add data stream lifecycle to Kibana reporting template -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/106285.yaml b/docs/changelog/106285.yaml deleted file mode 100644 index 37a7e67fe9395..0000000000000 --- a/docs/changelog/106285.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106285 -summary: Add a check for the same feature being declared regular and historical -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/106306.yaml b/docs/changelog/106306.yaml deleted file mode 100644 index 571fe73c31a3e..0000000000000 --- a/docs/changelog/106306.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99961 -summary: "Added fix for inconsistent text trimming in the Unified Highlighter" -area: Highlighting -type: bug -issues: - - 101803 diff --git a/docs/changelog/106315.yaml b/docs/changelog/106315.yaml deleted file mode 100644 index 57c41c8024d20..0000000000000 --- a/docs/changelog/106315.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106315 -summary: Updating the Tika version to 2.9.1 in the ingest attachment plugin -area: Ingest Node -type: upgrade -issues: [] diff --git a/docs/changelog/106327.yaml
b/docs/changelog/106327.yaml deleted file mode 100644 index 2b4b811ece40b..0000000000000 --- a/docs/changelog/106327.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106327 -summary: Serialize big array vectors -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106338.yaml b/docs/changelog/106338.yaml deleted file mode 100644 index c05826d87a11f..0000000000000 --- a/docs/changelog/106338.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106338 -summary: Text fields are stored by default in TSDB indices -area: TSDB -type: enhancement -issues: - - 97039 diff --git a/docs/changelog/106361.yaml b/docs/changelog/106361.yaml deleted file mode 100644 index a4cd608279c12..0000000000000 --- a/docs/changelog/106361.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106361 -summary: Add a `PriorityQueue` backed by `BigArrays` -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/106373.yaml b/docs/changelog/106373.yaml deleted file mode 100644 index e838c7b1a660d..0000000000000 --- a/docs/changelog/106373.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106373 -summary: Serialize big array blocks -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106377.yaml b/docs/changelog/106377.yaml deleted file mode 100644 index 7f0f18d43b440..0000000000000 --- a/docs/changelog/106377.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106377 -summary: Add transport version for search load autoscaling -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/106378.yaml b/docs/changelog/106378.yaml deleted file mode 100644 index b54760553d184..0000000000000 --- a/docs/changelog/106378.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106378 -summary: Add Cohere rerank to `_inference` service -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/106381.yaml b/docs/changelog/106381.yaml deleted file mode 100644 index 500f6d5416822..0000000000000 --- a/docs/changelog/106381.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106381 -summary: Dedupe terms in terms queries -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/106396.yaml b/docs/changelog/106396.yaml deleted file mode 100644 index 7aa06566c75e7..0000000000000 --- a/docs/changelog/106396.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106396 -summary: "Check preTags and postTags params for empty values" -area: Highlighting -type: bug -issues: - - 69009 diff --git a/docs/changelog/106413.yaml b/docs/changelog/106413.yaml deleted file mode 100644 index 8e13a839bc41e..0000000000000 --- a/docs/changelog/106413.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106413 -summary: Consolidate permissions checks -area: Transform -type: bug -issues: - - 105794 diff --git a/docs/changelog/106429.yaml b/docs/changelog/106429.yaml deleted file mode 100644 index 7ac524d13909b..0000000000000 --- a/docs/changelog/106429.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106429 -summary: "ESQL: Regex improvements" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106435.yaml b/docs/changelog/106435.yaml deleted file mode 100644 index 5bfe0087a93d3..0000000000000 --- a/docs/changelog/106435.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106435 -summary: "ENRICH support for TEXT fields" -area: ES|QL -type: enhancement -issues: - - 105384 diff --git a/docs/changelog/106472.yaml b/docs/changelog/106472.yaml deleted file mode 100644 index 120286c4cd8c7..0000000000000 --- a/docs/changelog/106472.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106472 -summary: "Fix the position of spike, dip and distribution changes bucket when the\ - \ sibling 
aggregation includes empty buckets" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106503.yaml b/docs/changelog/106503.yaml deleted file mode 100644 index 1b7e78d8ffc27..0000000000000 --- a/docs/changelog/106503.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106503 -summary: "Support ST_CONTAINS and ST_WITHIN" -area: "ES|QL" -type: enhancement -issues: [] diff --git a/docs/changelog/106511.yaml b/docs/changelog/106511.yaml deleted file mode 100644 index bdef7f1aea225..0000000000000 --- a/docs/changelog/106511.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106511 -summary: Wait indefinitely for HTTP connections on shutdown by default -area: Infra/Node Lifecycle -type: bug -issues: [] diff --git a/docs/changelog/106514.yaml b/docs/changelog/106514.yaml deleted file mode 100644 index 5b25f40db2742..0000000000000 --- a/docs/changelog/106514.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106514 -summary: Add granular error list to alias action response -area: Indices APIs -type: feature -issues: - - 94478 diff --git a/docs/changelog/106516.yaml b/docs/changelog/106516.yaml deleted file mode 100644 index 905896fb0ef03..0000000000000 --- a/docs/changelog/106516.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106516 -summary: "ESQL: perform a reduction on the data node" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106526.yaml b/docs/changelog/106526.yaml deleted file mode 100644 index ac98454b5d8b4..0000000000000 --- a/docs/changelog/106526.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106526 -summary: Enhance search tier GC options -area: Infra/CLI -type: enhancement -issues: [] diff --git a/docs/changelog/106531.yaml b/docs/changelog/106531.yaml deleted file mode 100644 index 631d74185d2d8..0000000000000 --- a/docs/changelog/106531.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106531 -summary: Get and Query API Key with profile uid -area: Security -type: feature -issues: [] diff --git a/docs/changelog/106563.yaml b/docs/changelog/106563.yaml deleted file mode 100644 index 79476f909a04c..0000000000000 --- a/docs/changelog/106563.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106563 -summary: Improve short-circuiting downsample execution -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/106575.yaml b/docs/changelog/106575.yaml deleted file mode 100644 index fb5230a9edb3d..0000000000000 --- a/docs/changelog/106575.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106575 -summary: Unable to retrieve multiple stored field values -area: "Search" -type: bug -issues: [] diff --git a/docs/changelog/106579.yaml b/docs/changelog/106579.yaml deleted file mode 100644 index 104ed3066a6f6..0000000000000 --- a/docs/changelog/106579.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106579 -summary: "ESQL: Allow grouping key inside stats expressions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106638.yaml b/docs/changelog/106638.yaml deleted file mode 100644 index 019800bf03157..0000000000000 --- a/docs/changelog/106638.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106638 -summary: Allow users to get status of own async search tasks -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/106654.yaml b/docs/changelog/106654.yaml deleted file mode 100644 index 3443b68482443..0000000000000 --- a/docs/changelog/106654.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106654 -summary: "ES|QL: Fix usage of IN operator with TEXT fields" -area: ES|QL -type: bug -issues: - - 105379 diff --git a/docs/changelog/106685.yaml b/docs/changelog/106685.yaml deleted file mode 100644 index
ed4a16ba0666c..0000000000000 --- a/docs/changelog/106685.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106685 -summary: '`SharedBlobCacheService.maybeFetchRegion` should use `computeCacheFileRegionSize`' -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/106691.yaml b/docs/changelog/106691.yaml deleted file mode 100644 index cbae9796e38c7..0000000000000 --- a/docs/changelog/106691.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106691 -summary: Fix range queries for float/half_float fields when bounds are out of type's - range -area: Search -type: bug -issues: [] diff --git a/docs/changelog/106708.yaml b/docs/changelog/106708.yaml deleted file mode 100644 index b8fdd37e5f03f..0000000000000 --- a/docs/changelog/106708.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106708 -summary: Improve error message when rolling over DS alias -area: Data streams -type: bug -issues: - - 106137 diff --git a/docs/changelog/106714.yaml b/docs/changelog/106714.yaml deleted file mode 100644 index 65b0acd77d764..0000000000000 --- a/docs/changelog/106714.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106714 -summary: Add non-indexed fields to ECS templates -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/106720.yaml b/docs/changelog/106720.yaml deleted file mode 100644 index 93358ed1d3dff..0000000000000 --- a/docs/changelog/106720.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106720 -summary: "ESQL: Fix treating all fields as MV in COUNT pushdown" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/106731.yaml b/docs/changelog/106731.yaml deleted file mode 100644 index 0d8e16a8f9616..0000000000000 --- a/docs/changelog/106731.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106731 -summary: Fix field caps and field level security -area: Security -type: bug -issues: [] diff --git a/docs/changelog/106745.yaml b/docs/changelog/106745.yaml deleted file mode 100644 index a6cb035bd267a..0000000000000 --- a/docs/changelog/106745.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106745 -summary: Fix `AffixSetting.exists` to include secure settings -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/106767.yaml b/docs/changelog/106767.yaml deleted file mode 100644 index 8541e1b14f275..0000000000000 --- a/docs/changelog/106767.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106767 -summary: Handle pass-through subfields with deep nesting -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/106796.yaml b/docs/changelog/106796.yaml deleted file mode 100644 index 83eb99dba1603..0000000000000 --- a/docs/changelog/106796.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106796 -summary: Bulk loading enrich fields in ESQL -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106808.yaml b/docs/changelog/106808.yaml deleted file mode 100644 index 287477fc302fd..0000000000000 --- a/docs/changelog/106808.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106808 -summary: Make OpenAI embeddings parser more flexible -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106810.yaml b/docs/changelog/106810.yaml deleted file mode 100644 index e93e5cf1e5361..0000000000000 --- a/docs/changelog/106810.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106810 -summary: "ES|QL: Improve support for TEXT fields in functions" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/106836.yaml b/docs/changelog/106836.yaml deleted file mode 100644 index f561f44d9bb2d..0000000000000 --- a/docs/changelog/106836.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106836 -summary: Make int8_hnsw our default index for new
dense-vector fields -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/106840.yaml b/docs/changelog/106840.yaml deleted file mode 100644 index 3f6831e4907ca..0000000000000 --- a/docs/changelog/106840.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106840 -summary: Add total size in bytes to doc stats -area: Stats -type: enhancement -issues: - - 97670 diff --git a/docs/changelog/106851.yaml b/docs/changelog/106851.yaml deleted file mode 100644 index 2ada6a6a4e088..0000000000000 --- a/docs/changelog/106851.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106851 -summary: Catching `StackOverflowErrors` from bad regexes in `GsubProcessor` -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/106852.yaml b/docs/changelog/106852.yaml deleted file mode 100644 index 2161b1ea22f30..0000000000000 --- a/docs/changelog/106852.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106852 -summary: Introduce ordinal bytesref block -area: ES|QL -type: enhancement -issues: - - 106387 diff --git a/docs/changelog/106860.yaml b/docs/changelog/106860.yaml deleted file mode 100644 index 376f8753023b9..0000000000000 --- a/docs/changelog/106860.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106860 -summary: "[Profiling] Add TopN Functions API" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/106862.yaml b/docs/changelog/106862.yaml deleted file mode 100644 index 3ca2660fc3f73..0000000000000 --- a/docs/changelog/106862.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106862 -summary: Extend support of `allowedFields` to `getMatchingFieldNames` and `getAllFields` -area: "Mapping" -type: bug -issues: [] diff --git a/docs/changelog/106866.yaml b/docs/changelog/106866.yaml deleted file mode 100644 index ffc34e5962850..0000000000000 --- a/docs/changelog/106866.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106866 -summary: Add ES|QL signum function -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106889.yaml b/docs/changelog/106889.yaml deleted file mode 100644 index 7755081d09036..0000000000000 --- a/docs/changelog/106889.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106889 -summary: Slightly better geoip `databaseType` validation -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/106899.yaml b/docs/changelog/106899.yaml deleted file mode 100644 index a2db24236a47e..0000000000000 --- a/docs/changelog/106899.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106899 -summary: Add ES|QL Locate function -area: ES|QL -type: enhancement -issues: - - 106818 diff --git a/docs/changelog/106919.yaml b/docs/changelog/106919.yaml deleted file mode 100644 index d8288095590de..0000000000000 --- a/docs/changelog/106919.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106919 -summary: Fix downsample action request serialization -area: Downsampling -type: bug -issues: - - 106917 diff --git a/docs/changelog/106934.yaml b/docs/changelog/106934.yaml deleted file mode 100644 index fbfce3118e8a6..0000000000000 --- a/docs/changelog/106934.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106934 -summary: Adjust array resizing in block builder -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/106952.yaml b/docs/changelog/106952.yaml deleted file mode 100644 index 1b45bf6ca28a2..0000000000000 --- a/docs/changelog/106952.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106952 -summary: Add Lucene Spanish plural stemmer -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/106989.yaml b/docs/changelog/106989.yaml deleted file mode 100644 index 47df5fe5b47d7..0000000000000 ---
a/docs/changelog/106989.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 106989 -summary: Make force-stopping the transform always remove persistent task from cluster - state -area: Transform -type: bug -issues: - - 106811 diff --git a/docs/changelog/107007.yaml b/docs/changelog/107007.yaml deleted file mode 100644 index b2a755171725b..0000000000000 --- a/docs/changelog/107007.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107007 -summary: "ESQL: Support ST_DISJOINT" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/107016.yaml b/docs/changelog/107016.yaml deleted file mode 100644 index a2e32749a8008..0000000000000 --- a/docs/changelog/107016.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107016 -summary: "ESQL: Enable VALUES agg for datetime" -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/107038.yaml b/docs/changelog/107038.yaml deleted file mode 100644 index e00b0d45a8a3a..0000000000000 --- a/docs/changelog/107038.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107038 -summary: Replace `UnsupportedOperationException` with `IllegalArgumentException` for non-existing columns -area: Search -type: bug -issues: [] diff --git a/docs/changelog/107041.yaml b/docs/changelog/107041.yaml deleted file mode 100644 index b8b4f3d7c5690..0000000000000 --- a/docs/changelog/107041.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107041 -summary: '`DocumentParsingObserver` to accept an `indexName` to allow skipping system - indices' -area: Infra/Metrics -type: enhancement -issues: [] diff --git a/docs/changelog/107046.yaml b/docs/changelog/107046.yaml deleted file mode 100644 index 6c1373e09d17c..0000000000000 --- a/docs/changelog/107046.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107046 -summary: "[Security Solution] Add `read` permission for third party agent indices\ - \ for `kibana_system`" -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/107050.yaml b/docs/changelog/107050.yaml deleted file mode 100644 index ecb375967ae44..0000000000000 --- a/docs/changelog/107050.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107050 -summary: Fix support for infinite `?master_timeout` -area: Cluster Coordination -type: bug -issues: [] diff --git a/docs/changelog/107107.yaml b/docs/changelog/107107.yaml deleted file mode 100644 index 5ca611befeb5d..0000000000000 --- a/docs/changelog/107107.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107107 -summary: Increase KDF iteration count in `KeyStoreWrapper` -area: Infra/CLI -type: enhancement -issues: [] diff --git a/docs/changelog/107121.yaml b/docs/changelog/107121.yaml deleted file mode 100644 index d46b1d58e9dfb..0000000000000 --- a/docs/changelog/107121.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107121 -summary: Add a flag to re-enable writes on the final index after an ILM shrink action. 
-area: ILM+SLM -type: enhancement -issues: - - 106599 diff --git a/docs/changelog/107129.yaml b/docs/changelog/107129.yaml deleted file mode 100644 index 6c9b9094962c1..0000000000000 --- a/docs/changelog/107129.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107129 -summary: Track ongoing search tasks -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml deleted file mode 100644 index 9589fe7e7264b..0000000000000 --- a/docs/changelog/107158.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107158 -summary: "ESQL: allow sorting by expressions and not only regular fields" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/107178.yaml b/docs/changelog/107178.yaml deleted file mode 100644 index 94a91357d38e6..0000000000000 --- a/docs/changelog/107178.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107178 -summary: "Add support for Azure OpenAI embeddings to inference service" -area: Machine Learning -type: feature -issues: [ ] diff --git a/docs/changelog/107183.yaml b/docs/changelog/107183.yaml deleted file mode 100644 index 226d036456858..0000000000000 --- a/docs/changelog/107183.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107183 -summary: ES|QL fix no-length substring with supplementary (4-byte) character -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/107196.yaml b/docs/changelog/107196.yaml deleted file mode 100644 index 9892ccf71856f..0000000000000 --- a/docs/changelog/107196.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107196 -summary: Add metric for calculating index flush time excluding waiting on locks -area: Engine -type: enhancement -issues: [] diff --git a/docs/changelog/107224.yaml b/docs/changelog/107224.yaml deleted file mode 100644 index b0d40c09b758a..0000000000000 --- a/docs/changelog/107224.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107224 -summary: "Enable 'encoder' and 'tags_schema' highlighting settings at field level" -area: Highlighting -type: enhancement -issues: - - 94028 diff --git a/docs/changelog/107232.yaml b/docs/changelog/107232.yaml deleted file mode 100644 index 1422848cb1c91..0000000000000 --- a/docs/changelog/107232.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107232 -summary: Only trigger action once per thread -area: Transform -type: bug -issues: - - 107215 diff --git a/docs/changelog/107242.yaml b/docs/changelog/107242.yaml deleted file mode 100644 index 4a5e9821a1fa9..0000000000000 --- a/docs/changelog/107242.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107242 -summary: Added a timeout parameter to the inference API -area: Machine Learning -type: enhancement -issues: [ ] diff --git a/docs/changelog/107253.yaml b/docs/changelog/107253.yaml deleted file mode 100644 index 6961b59231ea3..0000000000000 --- a/docs/changelog/107253.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107253 -summary: "[Connector API] Support cleaning up sync jobs when deleting a connector" -area: Application -type: feature -issues: [] diff --git a/docs/changelog/107272.yaml b/docs/changelog/107272.yaml deleted file mode 100644 index eb9e0c5e8bab8..0000000000000 --- a/docs/changelog/107272.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107272 -summary: "ESQL: extend BUCKET with spans" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/107279.yaml b/docs/changelog/107279.yaml new file mode 100644 index 0000000000000..a2940ecc9ba2d --- /dev/null +++ b/docs/changelog/107279.yaml @@ -0,0 +1,5 @@ +pr: 107279 +summary: Introduce _transform/_node_stats API +area: Transform +type: feature +issues: [] diff --git a/docs/changelog/107287.yaml 
b/docs/changelog/107287.yaml deleted file mode 100644 index 791f07fd1c729..0000000000000 --- a/docs/changelog/107287.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107287 -summary: Add support for the 'Anonymous IP' database to the geoip processor -area: Ingest Node -type: enhancement -issues: - - 90789 diff --git a/docs/changelog/107291.yaml b/docs/changelog/107291.yaml deleted file mode 100644 index 3274fb77ef8c8..0000000000000 --- a/docs/changelog/107291.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107291 -summary: Support data streams in enrich policy indices -area: Ingest Node -type: enhancement -issues: - - 98836 diff --git a/docs/changelog/107303.yaml b/docs/changelog/107303.yaml deleted file mode 100644 index 2e04ce6be3627..0000000000000 --- a/docs/changelog/107303.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107303 -summary: Create default word-based chunker -area: Machine Learning -type: feature -issues: [] diff --git a/docs/changelog/107312.yaml b/docs/changelog/107312.yaml deleted file mode 100644 index 6ecd4179596e5..0000000000000 --- a/docs/changelog/107312.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107312 -summary: Fix NPE in ML assignment notifier -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/107334.yaml b/docs/changelog/107334.yaml deleted file mode 100644 index d1e8df2fa9c40..0000000000000 --- a/docs/changelog/107334.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107334 -summary: Adding `cache_stats` to geoip stats API -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/107358.yaml b/docs/changelog/107358.yaml deleted file mode 100644 index edb6deeffd100..0000000000000 --- a/docs/changelog/107358.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107358 -summary: Check node shutdown before failing -area: Transform -type: enhancement -issues: - - 100891 diff --git a/docs/changelog/107370.yaml b/docs/changelog/107370.yaml deleted file mode 100644 index e7bdeef68cffe..0000000000000 --- a/docs/changelog/107370.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107370 -summary: Fork when handling remote field-caps responses -area: Search -type: bug -issues: [] diff --git a/docs/changelog/107377.yaml b/docs/changelog/107377.yaml deleted file mode 100644 index b9fea61d38a0a..0000000000000 --- a/docs/changelog/107377.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107377 -summary: Add support for the 'Enterprise' database to the geoip processor -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/107383.yaml b/docs/changelog/107383.yaml deleted file mode 100644 index 07886ac96180c..0000000000000 --- a/docs/changelog/107383.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107383 -summary: Users with monitor privileges can access async_search/status endpoint - even when setting keep_alive -area: Authorization -type: bug -issues: [] diff --git a/docs/changelog/107411.yaml b/docs/changelog/107411.yaml deleted file mode 100644 index fda040bcdab80..0000000000000 --- a/docs/changelog/107411.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107411 -summary: Invalidating cross cluster API keys requires `manage_security` -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/107414.yaml b/docs/changelog/107414.yaml deleted file mode 100644 index 60e31f22ca834..0000000000000 --- a/docs/changelog/107414.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 107414 -summary: "ESQL: median, count and `count_distinct` over constants" -area: ES|QL -type: bug -issues: - - 105248 - - 104900 diff --git a/docs/changelog/107447.yaml b/docs/changelog/107447.yaml deleted file mode 100644 index
6ace513013e3e..0000000000000 --- a/docs/changelog/107447.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107447 -summary: "Fix regression in get index settings (human=true) where the version was not displayed in human-readable format" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/107449.yaml b/docs/changelog/107449.yaml deleted file mode 100644 index 7f0b1bb826e94..0000000000000 --- a/docs/changelog/107449.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107449 -summary: Leverage ordinals in enrich lookup -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/107467.yaml b/docs/changelog/107467.yaml deleted file mode 100644 index e775e5928770d..0000000000000 --- a/docs/changelog/107467.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107467 -summary: "[Connector API] Fix bug with filtering validation toXContent" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/107494.yaml b/docs/changelog/107494.yaml deleted file mode 100644 index 1d71ce284a4a8..0000000000000 --- a/docs/changelog/107494.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107494 -summary: Handle infinity during synthetic source construction for scaled float field -area: Mapping -type: bug -issues: - - 107101 diff --git a/docs/changelog/107517.yaml b/docs/changelog/107517.yaml deleted file mode 100644 index 4d7830699ad49..0000000000000 --- a/docs/changelog/107517.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107517 -summary: Add GET `_inference` for all inference endpoints -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/107533.yaml b/docs/changelog/107533.yaml deleted file mode 100644 index da95cfd5b312e..0000000000000 --- a/docs/changelog/107533.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107533 -summary: Add setting for max connections to S3 -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/107551.yaml b/docs/changelog/107551.yaml deleted file mode 100644 index 78e64cc526638..0000000000000 --- a/docs/changelog/107551.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107551 -summary: Avoid attempting to load the same empty field twice in fetch phase -area: Search -type: bug -issues: [] diff --git a/docs/changelog/107577.yaml b/docs/changelog/107577.yaml deleted file mode 100644 index a9a3c36a0e04d..0000000000000 --- a/docs/changelog/107577.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107577 -summary: "ESQL: Fix MV_DEDUPE when using data from an index" -area: ES|QL -type: bug -issues: - - 104745 diff --git a/docs/changelog/107578.yaml b/docs/changelog/107578.yaml deleted file mode 100644 index 30746aeee6986..0000000000000 --- a/docs/changelog/107578.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107578 -summary: "ESQL: Allow reusing BUCKET grouping expressions in aggs" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/107598.yaml b/docs/changelog/107598.yaml deleted file mode 100644 index 125bbe759d2ea..0000000000000 --- a/docs/changelog/107598.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107598 -summary: Fix bulk NPE when retrying failure redirect after cluster block -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/107655.yaml b/docs/changelog/107655.yaml deleted file mode 100644 index 7091224d211f1..0000000000000 --- a/docs/changelog/107655.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107655 -summary: "Use #addWithoutBreaking when adding a negative number of bytes to the circuit\ - \ breaker in `SequenceMatcher`" -area: EQL -type: bug -issues: [] diff --git a/docs/changelog/107678.yaml b/docs/changelog/107678.yaml deleted file mode 100644 
index 9be55dd4d6b96..0000000000000 --- a/docs/changelog/107678.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107678 -summary: Validate stats formatting in standard `InternalStats` constructor -area: Aggregations -type: bug -issues: - - 107671 diff --git a/docs/changelog/107706.yaml b/docs/changelog/107706.yaml new file mode 100644 index 0000000000000..76b7f662bf0e0 --- /dev/null +++ b/docs/changelog/107706.yaml @@ -0,0 +1,5 @@ +pr: 107706 +summary: Add rate limiting support for the Inference API +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/107743.yaml b/docs/changelog/107743.yaml deleted file mode 100644 index fad45040330d2..0000000000000 --- a/docs/changelog/107743.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107743 -summary: Validate `model_id` is required when using the `learning_to_rank` rescorer -area: Search -type: bug -issues: [] diff --git a/docs/changelog/107828.yaml b/docs/changelog/107828.yaml deleted file mode 100644 index ba0d44029203d..0000000000000 --- a/docs/changelog/107828.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107828 -summary: Update several references to `IndexVersion.toString` to use `toReleaseVersion` -area: Infra/Core -type: bug -issues: - - 107821 diff --git a/docs/changelog/107865.yaml b/docs/changelog/107865.yaml deleted file mode 100644 index f7bb1d869eed5..0000000000000 --- a/docs/changelog/107865.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107865 -summary: Fix docs generation of signatures for variadic functions -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/107891.yaml b/docs/changelog/107891.yaml deleted file mode 100644 index deb3fbd2258ff..0000000000000 --- a/docs/changelog/107891.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107891 -summary: Fix `startOffset` must be non-negative error in XLMRoBERTa tokenizer -area: Machine Learning -type: bug -issues: - - 104626 diff --git a/docs/changelog/107902.yaml b/docs/changelog/107902.yaml deleted file mode 100644 index 6b25f8c12df60..0000000000000 --- a/docs/changelog/107902.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107902 -summary: Update several references to `TransportVersion.toString` to use `toReleaseVersion` -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/107969.yaml b/docs/changelog/107969.yaml deleted file mode 100644 index ed63513d8d57d..0000000000000 --- a/docs/changelog/107969.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107969 -summary: Disable PIT for remote clusters -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/107987.yaml b/docs/changelog/107987.yaml new file mode 100644 index 0000000000000..e8afebde0b190 --- /dev/null +++ b/docs/changelog/107987.yaml @@ -0,0 +1,6 @@ +pr: 107987 +summary: "ESQL: Implement LOOKUP, an \"inline\" enrich" +area: ES|QL +type: enhancement +issues: + - 107306 diff --git a/docs/changelog/108007.yaml b/docs/changelog/108007.yaml deleted file mode 100644 index 5d24f8c87597c..0000000000000 --- a/docs/changelog/108007.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108007 -summary: Allow `typed_keys` for search application Search API -area: Application -type: feature -issues: [] diff --git a/docs/changelog/108019.yaml b/docs/changelog/108019.yaml new file mode 100644 index 0000000000000..69e8e9fd371f8 --- /dev/null +++ b/docs/changelog/108019.yaml @@ -0,0 +1,6 @@ +pr: 108019 +summary: Ignore additional cpu.stat fields +area: Infra/Core +type: bug +issues: + - 107983 diff --git a/docs/changelog/108031.yaml b/docs/changelog/108031.yaml deleted file mode 100644 index 0d02ddddbd472..0000000000000 --- 
a/docs/changelog/108031.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108031 -summary: Fix lingering license warning header -area: License -type: bug -issues: - - 107573 diff --git a/docs/changelog/108041.yaml b/docs/changelog/108041.yaml deleted file mode 100644 index a82e0798dba21..0000000000000 --- a/docs/changelog/108041.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 108041 -summary: Handle parallel calls to `createWeight` when profiling is on -area: Search -type: bug -issues: - - 104131 - - 104235 diff --git a/docs/changelog/108101.yaml b/docs/changelog/108101.yaml deleted file mode 100644 index e935ec1beecd6..0000000000000 --- a/docs/changelog/108101.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108101 -summary: "ESQL: Fix error message when failing to resolve aggregate groupings" -area: ES|QL -type: bug -issues: - - 108053 diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml deleted file mode 100644 index 607979c2eb0ac..0000000000000 --- a/docs/changelog/108238.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108238 -summary: "Nativeaccess: try to load all located libsystemds" -area: Infra/Core -type: bug -issues: - - 107878 diff --git a/docs/changelog/108257.yaml b/docs/changelog/108257.yaml deleted file mode 100644 index ce2c72353af82..0000000000000 --- a/docs/changelog/108257.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108257 -summary: "ESQL: Log queries at debug level" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/108365.yaml b/docs/changelog/108365.yaml deleted file mode 100644 index d94486e2f3ea7..0000000000000 --- a/docs/changelog/108365.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108365 -summary: "[Bugfix] Connector API - fix status serialisation issue in termquery" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml deleted file mode 100644 index 84607b1b99ac3..0000000000000 --- a/docs/changelog/108431.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108431 -summary: "ESQL: Disable quoting in FROM command" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/108455.yaml b/docs/changelog/108455.yaml new file mode 100644 index 0000000000000..8397af7b07cf1 --- /dev/null +++ b/docs/changelog/108455.yaml @@ -0,0 +1,6 @@ +pr: 108455 +summary: "[ES|QL] Convert string to datetime when the other side of an arithmetic\ + \ operator is `date_period` or `time_duration`" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/108518.yaml b/docs/changelog/108518.yaml deleted file mode 100644 index aad823ccc89f6..0000000000000 --- a/docs/changelog/108518.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108518 -summary: Remove leading is_ prefix from Enterprise geoip docs -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/108537.yaml b/docs/changelog/108537.yaml new file mode 100644 index 0000000000000..1c0228a71d449 --- /dev/null +++ b/docs/changelog/108537.yaml @@ -0,0 +1,6 @@ +pr: 108537 +summary: Limit the value in prefix query +area: Search +type: enhancement +issues: + - 108486 diff --git a/docs/changelog/108538.yaml b/docs/changelog/108538.yaml new file mode 100644 index 0000000000000..10ae49f0c1670 --- /dev/null +++ b/docs/changelog/108538.yaml @@ -0,0 +1,5 @@ +pr: 108538 +summary: Adding RankFeature search phase implementation +area: Search +type: feature +issues: [] diff --git a/docs/changelog/108562.yaml b/docs/changelog/108562.yaml deleted file mode 100644 index 2a0047fe807fd..0000000000000 --- a/docs/changelog/108562.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108562 -summary: Add
`internalClusterTest` for and fix leak in `ExpandSearchPhase` -area: Search -type: bug -issues: - - 108369 diff --git a/docs/changelog/108571.yaml b/docs/changelog/108571.yaml deleted file mode 100644 index b863ac90d9e5f..0000000000000 --- a/docs/changelog/108571.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108571 -summary: Workaround G1 bug for JDK 22 and 22.0.1 -area: Infra/CLI -type: bug -issues: [] diff --git a/docs/changelog/108600.yaml b/docs/changelog/108600.yaml deleted file mode 100644 index 59177bf34114c..0000000000000 --- a/docs/changelog/108600.yaml +++ /dev/null @@ -1,15 +0,0 @@ -pr: 108600 -summary: "Prevent DLS/FLS if `replication` is assigned" -area: Security -type: breaking -issues: [ ] -breaking: - title: "Prevent DLS/FLS if `replication` is assigned" - area: REST API - details: For cross-cluster API keys, {es} no longer allows specifying document-level security (DLS) - or field-level security (FLS) in the `search` field, if `replication` is also specified. - {es} likewise blocks the use of any existing cross-cluster API keys that meet this condition. - impact: Remove any document-level security (DLS) or field-level security (FLS) definitions from the `search` field - for cross-cluster API keys that also have a `replication` field, or create two separate cross-cluster API keys, - one for search and one for replication. - notable: false diff --git a/docs/changelog/108639.yaml b/docs/changelog/108639.yaml index 586270c3c761c..e4964cbeb0285 100644 --- a/docs/changelog/108639.yaml +++ b/docs/changelog/108639.yaml @@ -3,26 +3,3 @@ summary: Add support for the 'Domain' database to the geoip processor area: Ingest Node type: enhancement issues: [] -highlight: - title: Add support for the 'Domain' database to the geoip processor - body: |- - Follow on to #107287 and #107377 - - Adds support for the ['GeoIP2 - Domain'](https://dev.maxmind.com/geoip/docs/databases/domain) database - from MaxMind to the `geoip` processor. - - The `geoip` processor will automatically download the [various - 'GeoLite2' - databases](https://dev.maxmind.com/geoip/geolite2-free-geolocation-data), - but the 'GeoIP2 Domain' database is not a 'GeoLite2' database -- it's a - commercial database available to those with a suitable license from - MaxMind. - - The support that is being added for it in this PR is in line with the - support that we already have for MaxMind's 'GeoIP2 City' and 'GeoIP2 - Country' databases -- that is, one would need to arrange their own - download management via some custom endpoint or otherwise arrange for - the relevant file(s) to be in the `$ES_CONFIG/ingest-geoip` directory on - the nodes of the cluster. - notable: true diff --git a/docs/changelog/108651.yaml b/docs/changelog/108651.yaml index e629c114dac51..227c464909d50 100644 --- a/docs/changelog/108651.yaml +++ b/docs/changelog/108651.yaml @@ -3,27 +3,3 @@ summary: Add support for the 'ISP' database to the geoip processor area: Ingest Node type: enhancement issues: [] -highlight: - title: Add support for the 'ISP' database to the geoip processor - body: |- - Follow on to https://github.com/elastic/elasticsearch/pull/107287, - https://github.com/elastic/elasticsearch/pull/107377, and - https://github.com/elastic/elasticsearch/pull/108639 - - Adds support for the ['GeoIP2 - ISP'](https://dev.maxmind.com/geoip/docs/databases/isp) database from - MaxMind to the geoip processor. 
- - The geoip processor will automatically download the [various 'GeoLite2' - databases](https://dev.maxmind.com/geoip/geolite2-free-geolocation-data), - but the 'GeoIP2 ISP' database is not a 'GeoLite2' database -- it's a - commercial database available to those with a suitable license from - MaxMind. - - The support that is being added for it in this PR is in line with the - support that we already have for MaxMind's 'GeoIP2 City' and 'GeoIP2 - Country' databases -- that is, one would need to arrange their own - download management via some custom endpoint or otherwise arrange for - the relevant file(s) to be in the $ES_CONFIG/ingest-geoip directory on - the nodes of the cluster. - notable: true diff --git a/docs/changelog/108654.yaml b/docs/changelog/108654.yaml deleted file mode 100644 index 9afae6a19ca80..0000000000000 --- a/docs/changelog/108654.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108654 -summary: Update bundled JDK to Java 22 (again) -area: Packaging -type: upgrade -issues: [] diff --git a/docs/changelog/108682.yaml b/docs/changelog/108682.yaml new file mode 100644 index 0000000000000..bd566acab8306 --- /dev/null +++ b/docs/changelog/108682.yaml @@ -0,0 +1,5 @@ +pr: 108682 +summary: Adding support for explain in RRF +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/108683.yaml b/docs/changelog/108683.yaml index ad796fb9b25c7..b9e7df5fefc18 100644 --- a/docs/changelog/108683.yaml +++ b/docs/changelog/108683.yaml @@ -4,25 +4,11 @@ area: Ingest Node type: enhancement issues: [] highlight: - title: Add support for the 'Connection Type' database to the geoip processor + title: "Preview: Support for the 'Connection Type', 'Domain', and 'ISP' databases in the geoip processor" body: |- - Follow on to #107287, #107377, #108639, and #108651 - - Adds support for the ['GeoIP2 Connection - Type'](https://dev.maxmind.com/geoip/docs/databases/connection-type) - database from MaxMind to the `geoip` processor. - - The `geoip` processor will automatically download the [various - 'GeoLite2' - databases](https://dev.maxmind.com/geoip/geolite2-free-geolocation-data), - but the 'GeoIP2 Connection Type' database is not a 'GeoLite2' database - -- it's a commercial database available to those with a suitable license - from MaxMind. - - The support that is being added for it in this PR is in line with the - support that we already have for MaxMind's 'GeoIP2 City' and 'GeoIP2 - Country' databases -- that is, one would need to arrange their own - download management via some custom endpoint or otherwise arrange for - the relevant file(s) to be in the `$ES_CONFIG/ingest-geoip` directory on - the nodes of the cluster. - notable: true + As a Technical Preview, the {ref}/geoip-processor.html[`geoip`] processor can now use the commercial + https://dev.maxmind.com/geoip/docs/databases/connection-type[GeoIP2 'Connection Type'], + https://dev.maxmind.com/geoip/docs/databases/domain[GeoIP2 'Domain'], + and + https://dev.maxmind.com/geoip/docs/databases/isp[GeoIP2 'ISP'] + databases from MaxMind.
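As a hypothetical sketch of how one of these commercial databases could be used (the pipeline name and field names are invented, and the `GeoIP2-ISP.mmdb` file is assumed to already be present in the `$ES_CONFIG/ingest-geoip` directory on the nodes of the cluster, as described above):

[source,console]
----
PUT _ingest/pipeline/isp-lookup
{
  "description": "Look up ISP information for source.ip",
  "processors": [
    {
      "geoip": {
        "field": "source.ip",
        "target_field": "source.isp",
        "database_file": "GeoIP2-ISP.mmdb"
      }
    }
  ]
}
----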
diff --git a/docs/changelog/108736.yaml b/docs/changelog/108736.yaml deleted file mode 100644 index 41e4084021e00..0000000000000 --- a/docs/changelog/108736.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108736 -summary: Harden field-caps request dispatcher -area: Search -type: bug -issues: [] diff --git a/docs/changelog/108802.yaml b/docs/changelog/108802.yaml deleted file mode 100644 index 7c28a81a1b353..0000000000000 --- a/docs/changelog/108802.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108802 -summary: Fix multithreading copies in lib vec -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/108834.yaml b/docs/changelog/108834.yaml deleted file mode 100644 index 044056fa9a9da..0000000000000 --- a/docs/changelog/108834.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108834 -summary: "[ESQL] Mark `date_diff` as requiring all three arguments" -area: ES|QL -type: bug -issues: - - 108383 diff --git a/docs/changelog/108854.yaml b/docs/changelog/108854.yaml deleted file mode 100644 index d6a880830f0d9..0000000000000 --- a/docs/changelog/108854.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108854 -summary: "[Connector API] Fix bug with parsing *_doc_count nullable fields" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/108867.yaml b/docs/changelog/108867.yaml deleted file mode 100644 index 545349dd84aeb..0000000000000 --- a/docs/changelog/108867.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108867 -summary: Fix for raw mapping merge of fields named "properties" -area: Mapping -type: bug -issues: - - 108866 diff --git a/docs/changelog/108900.yaml b/docs/changelog/108900.yaml deleted file mode 100644 index 2a182f03ff8ce..0000000000000 --- a/docs/changelog/108900.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108900 -summary: Treat 404 as empty register in `AzureBlobStore` -area: Snapshot/Restore -type: bug -issues: - - 108504 diff --git a/docs/changelog/108947.yaml b/docs/changelog/108947.yaml new file mode 100644 index 0000000000000..8aa4293242985 --- /dev/null +++ b/docs/changelog/108947.yaml @@ -0,0 +1,5 @@ +pr: 108947 +summary: Provide the `DocumentSizeReporter` with index mode +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/docs/changelog/109020.yaml b/docs/changelog/109020.yaml deleted file mode 100644 index c3efb1a1409bf..0000000000000 --- a/docs/changelog/109020.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 109020 -summary: Only skip deleting a downsampled index if downsampling is in progress as - part of DSL retention -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/109034.yaml b/docs/changelog/109034.yaml deleted file mode 100644 index cdf1f6fe28d8d..0000000000000 --- a/docs/changelog/109034.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109034 -summary: Fix IOOBE in TTest aggregation when using filters -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/109048.yaml b/docs/changelog/109048.yaml deleted file mode 100644 index 8bae082404ecd..0000000000000 --- a/docs/changelog/109048.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 109048 -summary: Guard against a null scorer in painless execute -area: Infra/Scripting -type: bug -issues: - - 43541 diff --git a/docs/changelog/109097.yaml b/docs/changelog/109097.yaml deleted file mode 100644 index a7520f4eaa9be..0000000000000 --- a/docs/changelog/109097.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 109097 -summary: "[Connector API] Fix bug with wrong target index for access control\ - \ sync" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/109148.yaml b/docs/changelog/109148.yaml
deleted file mode 100644 index 902da6f1a1db3..0000000000000 --- a/docs/changelog/109148.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 109148 -summary: Fix double-pausing shard snapshot -area: Snapshot/Restore -type: bug -issues: - - 109143 diff --git a/docs/changelog/109173.yaml b/docs/changelog/109173.yaml deleted file mode 100644 index 9f4f73a6f74c8..0000000000000 --- a/docs/changelog/109173.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109173 -summary: Wrap "Pattern too complex" exception into an `IllegalArgumentException` -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/109185.yaml b/docs/changelog/109185.yaml new file mode 100644 index 0000000000000..4da72c4b20ffb --- /dev/null +++ b/docs/changelog/109185.yaml @@ -0,0 +1,6 @@ +pr: 109185 +summary: Handle unmatching remote cluster wildcards properly for `IndicesRequest.SingleIndexNoWildcards` + requests +area: Authorization +type: bug +issues: [] diff --git a/docs/changelog/109194.yaml b/docs/changelog/109194.yaml new file mode 100644 index 0000000000000..bf50139547f62 --- /dev/null +++ b/docs/changelog/109194.yaml @@ -0,0 +1,5 @@ +pr: 109194 +summary: "[Inference API] Add Mistral Embeddings Support to Inference API" +area: Machine Learning +type: enhancement +issues: [ ] diff --git a/docs/changelog/109204.yaml b/docs/changelog/109204.yaml new file mode 100644 index 0000000000000..b5b22ef1a06f9 --- /dev/null +++ b/docs/changelog/109204.yaml @@ -0,0 +1,5 @@ +pr: 109204 +summary: Detect long-running tasks on network threads +area: Network +type: enhancement +issues: [] diff --git a/docs/changelog/109220.yaml b/docs/changelog/109220.yaml new file mode 100644 index 0000000000000..b8efa8f784d7a --- /dev/null +++ b/docs/changelog/109220.yaml @@ -0,0 +1,5 @@ +pr: 109220 +summary: "ESQL: add REPEAT string function" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/109233.yaml b/docs/changelog/109233.yaml new file mode 100644 index 0000000000000..36010273c80db --- /dev/null +++ b/docs/changelog/109233.yaml @@ -0,0 +1,5 @@ +pr: 109233 +summary: Fix trappy timeouts in security settings APIs +area: Security +type: bug +issues: [] diff --git a/docs/changelog/109276.yaml b/docs/changelog/109276.yaml new file mode 100644 index 0000000000000..d73e68e3c8f7b --- /dev/null +++ b/docs/changelog/109276.yaml @@ -0,0 +1,5 @@ +pr: 109276 +summary: Add remove index setting command +area: Infra/Settings +type: enhancement +issues: [] diff --git a/docs/changelog/109312.yaml b/docs/changelog/109312.yaml new file mode 100644 index 0000000000000..594d3f90e8fd1 --- /dev/null +++ b/docs/changelog/109312.yaml @@ -0,0 +1,5 @@ +pr: 109312 +summary: Enable fallback synthetic source for `point` and `shape` +area: Mapping +type: feature +issues: [] diff --git a/docs/changelog/109320.yaml b/docs/changelog/109320.yaml new file mode 100644 index 0000000000000..84aff5b1d769d --- /dev/null +++ b/docs/changelog/109320.yaml @@ -0,0 +1,5 @@ +pr: 109320 +summary: Reset retryable index requests after failures +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/109332.yaml b/docs/changelog/109332.yaml new file mode 100644 index 0000000000000..3d03523fd518b --- /dev/null +++ b/docs/changelog/109332.yaml @@ -0,0 +1,5 @@ +pr: 109332 +summary: "ES|QL: vectorize eval" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/109357.yaml b/docs/changelog/109357.yaml new file mode 100644 index 0000000000000..17951882103b3 --- /dev/null +++ b/docs/changelog/109357.yaml @@ -0,0 +1,5 @@ +pr: 109357 +summary: Fix task cancellation 
authz on fulfilling cluster +area: Authorization +type: bug +issues: [] diff --git a/docs/changelog/109358.yaml b/docs/changelog/109358.yaml new file mode 100644 index 0000000000000..af47b4129d874 --- /dev/null +++ b/docs/changelog/109358.yaml @@ -0,0 +1,5 @@ +pr: 109358 +summary: Use the multi node routing action for internal inference services +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/109370.yaml b/docs/changelog/109370.yaml new file mode 100644 index 0000000000000..32b190d1a1c94 --- /dev/null +++ b/docs/changelog/109370.yaml @@ -0,0 +1,6 @@ +pr: 109370 +summary: Enable fallback synthetic source by default +area: Mapping +type: feature +issues: + - 106460 diff --git a/docs/changelog/109384.yaml b/docs/changelog/109384.yaml new file mode 100644 index 0000000000000..303da23d57d8e --- /dev/null +++ b/docs/changelog/109384.yaml @@ -0,0 +1,5 @@ +pr: 109384 +summary: Fix serialising inference delete response +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/109423.yaml b/docs/changelog/109423.yaml new file mode 100644 index 0000000000000..5f594ea482338 --- /dev/null +++ b/docs/changelog/109423.yaml @@ -0,0 +1,5 @@ +pr: 109423 +summary: Correct how hex strings are handled when dynamically updating vector dims +area: Vector Search +type: bug +issues: [] diff --git a/docs/changelog/109440.yaml b/docs/changelog/109440.yaml new file mode 100644 index 0000000000000..c1e9aef8110fc --- /dev/null +++ b/docs/changelog/109440.yaml @@ -0,0 +1,5 @@ +pr: 109440 +summary: Fix task cancellation on remote cluster when original request fails +area: Network +type: bug +issues: [] diff --git a/docs/changelog/109449.yaml b/docs/changelog/109449.yaml new file mode 100644 index 0000000000000..90cb908227f1b --- /dev/null +++ b/docs/changelog/109449.yaml @@ -0,0 +1,6 @@ +pr: 109449 +summary: Reset max page size to settings value +area: Transform +type: bug +issues: + - 109308 diff --git a/docs/changelog/109470.yaml b/docs/changelog/109470.yaml new file mode 100644 index 0000000000000..837c1664b775a --- /dev/null +++ b/docs/changelog/109470.yaml @@ -0,0 +1,5 @@ +pr: 109470 +summary: Enabling profiling for `RankBuilders` and adding tests for RRF +area: Ranking +type: enhancement +issues: [] diff --git a/docs/changelog/109481.yaml b/docs/changelog/109481.yaml new file mode 100644 index 0000000000000..e8251788a90bd --- /dev/null +++ b/docs/changelog/109481.yaml @@ -0,0 +1,5 @@ +pr: 109481 +summary: Fork freeing search/scroll contexts to GENERIC pool +area: Search +type: bug +issues: [] diff --git a/docs/changelog/97072.yaml b/docs/changelog/97072.yaml deleted file mode 100644 index 686b30952b646..0000000000000 --- a/docs/changelog/97072.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97072 -summary: Log when updating AffixSetting using addAffixMapUpdateConsumer -area: Infra/Logging -type: bug -issues: [] diff --git a/docs/changelog/97561.yaml b/docs/changelog/97561.yaml deleted file mode 100644 index cacefbf7e4ca3..0000000000000 --- a/docs/changelog/97561.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 97561 -summary: Add index forecasts to /_cat/allocation output -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/99048.yaml b/docs/changelog/99048.yaml deleted file mode 100644 index 722c145dae78f..0000000000000 --- a/docs/changelog/99048.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99048 -summary: String sha512() painless function -area: Infra/Scripting -type: enhancement -issues: - - 97691 diff --git
a/docs/painless/painless-guide/painless-execute-script.asciidoc b/docs/painless/painless-guide/painless-execute-script.asciidoc index 1c7ec61ffa75b..4417daeb63efa 100644 --- a/docs/painless/painless-guide/painless-execute-script.asciidoc +++ b/docs/painless/painless-guide/painless-execute-script.asciidoc @@ -123,6 +123,10 @@ alias. For example, `remote1:my_index` indicates that you want to execute the painless script against the "my_index" index on the "remote1" cluster. This request will be forwarded to the "remote1" cluster if you have {ref}/remote-clusters-connect.html[configured a connection] to that remote cluster. + +NOTE: Wildcards are not accepted in the index expression for this endpoint. The +expression `*:myindex` will return the error "No such remote cluster", and the +expression `logs*` or `remote1:logs*` will return the error "index not found". ==== `params`:: (`Map`, read-only) diff --git a/docs/reference/commands/node-tool.asciidoc b/docs/reference/commands/node-tool.asciidoc index 810de4a71fffb..cdd2bb8f0f9d7 100644 --- a/docs/reference/commands/node-tool.asciidoc +++ b/docs/reference/commands/node-tool.asciidoc @@ -31,6 +31,10 @@ This tool has a number of modes: from the cluster state in cases where it contains incompatible settings that prevent the cluster from forming. +* `elasticsearch-node remove-index-settings` can be used to remove index settings + from the cluster state in cases where it contains incompatible index settings that + prevent the cluster from forming. + * `elasticsearch-node remove-customs` can be used to remove custom metadata from the cluster state in cases where it contains broken metadata that prevents the cluster state from being loaded. @@ -107,6 +111,26 @@ The intended use is: * Repeat for all other master-eligible nodes * Start the nodes +[discrete] +==== Removing index settings + +There may be situations where an index contains index settings +that prevent the cluster from forming. Since the cluster cannot form, +it is not possible to remove these settings using the +<> API. + +The `elasticsearch-node remove-index-settings` tool allows you to forcefully remove +those index settings from the on-disk cluster state. The tool takes as parameters a +list of index settings that should be removed, and also supports +wildcard patterns. + +The intended use is: + +* Stop the node +* Run `elasticsearch-node remove-index-settings name-of-index-setting-to-remove` on the node +* Repeat for all nodes +* Start the nodes + [discrete] ==== Removing custom metadata from the cluster state @@ -436,6 +460,37 @@ You can also use wildcards to remove multiple settings, for example using node$ ./bin/elasticsearch-node remove-settings xpack.monitoring.* ---- +[discrete] +==== Removing index settings + +If your indices contain index settings that prevent the cluster +from forming, you can run the following command to remove one +or more index settings. + +[source,txt] +---- +node$ ./bin/elasticsearch-node remove-index-settings index.my_plugin.foo + + WARNING: Elasticsearch MUST be stopped before running this tool. + +You should only run this tool if you have incompatible index settings in the +cluster state that prevent the cluster from forming. +This tool can cause data loss and its use should be your last resort. + +Do you want to proceed?
+ +Confirm [y/N] y + +Index settings were successfully removed from the cluster state +---- + +You can also use wildcards to remove multiple index settings, for example using + +[source,txt] +---- +node$ ./bin/elasticsearch-node remove-index-settings index.my_plugin.* +---- + [discrete] ==== Removing custom metadata from the cluster state diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index 9d23fcf8d336e..41186ff6326f2 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -40,6 +40,8 @@ beta:[] beta:[] * <> beta:[] +* <> +beta:[] * <> beta:[] * <> @@ -120,6 +122,7 @@ include::list-connectors-api.asciidoc[] include::update-connector-api-key-id-api.asciidoc[] include::update-connector-configuration-api.asciidoc[] include::update-connector-index-name-api.asciidoc[] +include::update-connector-features-api.asciidoc[] include::update-connector-filtering-api.asciidoc[] include::update-connector-name-description-api.asciidoc[] include::update-connector-pipeline-api.asciidoc[] diff --git a/docs/reference/connector/apis/update-connector-features-api.asciidoc b/docs/reference/connector/apis/update-connector-features-api.asciidoc new file mode 100644 index 0000000000000..0d3457b9bd584 --- /dev/null +++ b/docs/reference/connector/apis/update-connector-features-api.asciidoc @@ -0,0 +1,138 @@ +[[update-connector-features-api]] +=== Update connector features API +++++ +Update connector features +++++ + +beta::[] + +Manages the `features` of a connector. This endpoint can be used to control the following aspects of a connector: + +* document-level security +* incremental syncs +* advanced sync rules +* basic sync rules + +Normally, the running connector service automatically manages these features. However, you can use this API to override the default behavior. + +To get started with Connector APIs, check out the {enterprise-search-ref}/connectors-tutorial-api.html[tutorial^]. + +[[update-connector-features-api-request]] +==== {api-request-title} + +`PUT _connector//_features` + +[[update-connector-features-api-prereq]] +==== {api-prereq-title} + +* To sync data using self-managed connectors, you need to deploy the {enterprise-search-ref}/build-connector.html[Elastic connector service] on your own infrastructure. This service runs automatically on Elastic Cloud for native connectors. +* The `connector_id` parameter should reference an existing connector. + +[[update-connector-features-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-features-api-request-body]] +==== {api-request-body-title} + +`features`:: +(Required, object) An object containing connector features. + +* `document_level_security` (Optional, object) Controls whether document-level security is enabled with the `enabled` flag. +* `incremental_sync` (Optional, object) Controls whether incremental syncs are enabled with the `enabled` flag. +* `native_connector_api_keys` (Optional, object) Controls whether native connector API keys are enabled with the `enabled` flag. +* `sync_rules` (Optional, object) Controls sync rules. +** `advanced` (Optional, object) Controls whether advanced sync rules are enabled with the `enabled` flag. +** `basic` (Optional, object) Controls whether basic sync rules are enabled with the `enabled` flag. 
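+ +Each feature object carries a single boolean `enabled` flag. As a quick illustrative sketch (a hypothetical body, not part of the original reference; see the examples below for complete, tested requests), a request that toggles one feature could look like: + +[source,js] +---- +{ + "features": { + "incremental_sync": { + "enabled": true + } + } +} +---- +// NOTCONSOLE 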
+ + + +[[update-connector-features-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `features` was successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[update-connector-features-api-example]] +==== {api-examples-title} + +The following example updates the `features` field for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_features +{ + "features": { + "document_level_security": { + "enabled": true + }, + "incremental_sync": { + "enabled": true + }, + "sync_rules": { + "advanced": { + "enabled": false + }, + "basic": { + "enabled": true + } + } + } +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- + +The endpoint supports partial updates of the `features` field. For example, to update only the `document_level_security` feature, you can send the following request: + +[source,console] +---- +PUT _connector/my-connector/_features +{ + "features": { + "document_level_security": { + "enabled": true + } + } +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index e943a25f1fb05..2e81e5b7e3848 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -285,7 +285,7 @@ field values that are older than 2 hours (the `index.look_back_time` default). A TSDS is designed to ingest current metrics data. When the TSDS is first created the initial backing index has: -* an `index.time_series.start_time` value set to `now - index.look_ahead_time` +* an `index.time_series.start_time` value set to `now - index.look_back_time` * an `index.time_series.end_time` value set to `now + index.look_ahead_time` Only data that falls inside that range can be indexed. 
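+ +For example, assuming `index.look_back_time` keeps its `2h` default and `index.look_ahead_time` is set to `30m` (an illustrative value, not necessarily the default), a backing index created at 12:00 gets an `index.time_series.start_time` of 10:00 and an `index.time_series.end_time` of 12:30, so only documents with `@timestamp` values inside that two-and-a-half-hour window can be indexed into it. 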
diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 1f07361b89aac..bed79299b1cc1 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -39,6 +39,9 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> * <> +ifeval::["{release-state}"=="unreleased"] +* experimental:[] <> +endif::[] * experimental:[] <> * <> * <> @@ -58,6 +61,9 @@ include::processing-commands/eval.asciidoc[] include::processing-commands/grok.asciidoc[] include::processing-commands/keep.asciidoc[] include::processing-commands/limit.asciidoc[] +ifeval::["{release-state}"=="unreleased"] +include::processing-commands/lookup.asciidoc[] +endif::[] include::processing-commands/mv_expand.asciidoc[] include::processing-commands/rename.asciidoc[] include::processing-commands/sort.asciidoc[] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index fbac57d88118e..2cdd97ceab176 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -78,6 +78,11 @@ For syntax, refer to <>. `query`:: (Required, string) {esql} query to run. For syntax, refer to <>. +ifeval::["{release-state}"=="unreleased"] +`table`:: +(Optional, object) Named "table" parameters that can be referenced by the <> command. +endif::[] + [discrete] [role="child_attributes"] [[esql-query-api-response-body]] diff --git a/docs/reference/esql/functions/description/mv_append.asciidoc b/docs/reference/esql/functions/description/mv_append.asciidoc new file mode 100644 index 0000000000000..26b549713e301 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_append.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Concatenates values of two multi-value fields. diff --git a/docs/reference/esql/functions/description/repeat.asciidoc b/docs/reference/esql/functions/description/repeat.asciidoc new file mode 100644 index 0000000000000..e008eca90e9e4 --- /dev/null +++ b/docs/reference/esql/functions/description/repeat.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a string constructed by concatenating `string` with itself the specified `number` of times. diff --git a/docs/reference/esql/functions/examples/repeat.asciidoc b/docs/reference/esql/functions/examples/repeat.asciidoc new file mode 100644 index 0000000000000..97bede2517f10 --- /dev/null +++ b/docs/reference/esql/functions/examples/repeat.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=repeat] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=repeat-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index d9659fa03e809..f00f471e63ecc 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -261,6 +261,24 @@ ], "variadic" : true, "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "Expression to evaluate." + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "Other expression to evaluate." + } + ], + "variadic" : true, + "returnType" : "version" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_append.json b/docs/reference/esql/functions/kibana/definition/mv_append.json new file mode 100644 index 0000000000000..8ee4e7297cc3a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_append.json @@ -0,0 +1,242 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_append", + "description" : "Concatenates values of two multi-value fields.", + "signatures" : [ + { + "params" : [ + { + "name" : "field1", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "geo_point", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "geo_shape", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + 
"name" : "field2", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field1", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json index c8d221f933de1..7ab287bc94d34 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -17,6 +17,30 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, { "params" : [ { @@ -41,6 +65,30 @@ "variadic" : false, "returnType" : "double" }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json index 94c13f14c43e5..b6e5c86a3f0b8 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_zip.json +++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json @@ -17,6 +17,222 @@ "type" : "keyword", "optional" : false, "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." 
+ }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "Delimiter. Optional; if omitted, `,` is used as a default delimiter." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." 
+ } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "Multivalue expression." }, { "name" : "delim", diff --git a/docs/reference/esql/functions/kibana/definition/repeat.json b/docs/reference/esql/functions/kibana/definition/repeat.json new file mode 100644 index 0000000000000..2ac94e12c1796 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/repeat.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "repeat", + "description" : "Returns a string constructed by concatenating `string` with itself the specified `number` of times.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "String expression." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number times to repeat." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "String expression." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number times to repeat." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "ROW a = \"Hello!\"\n| EVAL triple_a = REPEAT(a, 3);" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/mv_append.md b/docs/reference/esql/functions/kibana/docs/mv_append.md new file mode 100644 index 0000000000000..36b285be1877c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_append.md @@ -0,0 +1,7 @@ + + +### MV_APPEND +Concatenates values of two multi-value fields. + diff --git a/docs/reference/esql/functions/kibana/docs/repeat.md b/docs/reference/esql/functions/kibana/docs/repeat.md new file mode 100644 index 0000000000000..cc46e8282d9fe --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/repeat.md @@ -0,0 +1,11 @@ + + +### REPEAT +Returns a string constructed by concatenating `string` with itself the specified `number` of times. + +``` +ROW a = "Hello!" +| EVAL triple_a = REPEAT(a, 3); +``` diff --git a/docs/reference/esql/functions/layout/mv_append.asciidoc b/docs/reference/esql/functions/layout/mv_append.asciidoc new file mode 100644 index 0000000000000..4d4dbd7a24f9d --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_append.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-mv_append]] +=== `MV_APPEND` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_append.svg[Embedded,opts=inline] + +include::../parameters/mv_append.asciidoc[] +include::../description/mv_append.asciidoc[] +include::../types/mv_append.asciidoc[] diff --git a/docs/reference/esql/functions/layout/repeat.asciidoc b/docs/reference/esql/functions/layout/repeat.asciidoc new file mode 100644 index 0000000000000..c001b22260485 --- /dev/null +++ b/docs/reference/esql/functions/layout/repeat.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-repeat]] +=== `REPEAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/repeat.svg[Embedded,opts=inline] + +include::../parameters/repeat.asciidoc[] +include::../description/repeat.asciidoc[] +include::../types/repeat.asciidoc[] +include::../examples/repeat.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/mv_append.asciidoc b/docs/reference/esql/functions/parameters/mv_append.asciidoc new file mode 100644 index 0000000000000..e08d697c25098 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_append.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field1`:: + + +`field2`:: + diff --git a/docs/reference/esql/functions/parameters/repeat.asciidoc b/docs/reference/esql/functions/parameters/repeat.asciidoc new file mode 100644 index 0000000000000..263191340f5d9 --- /dev/null +++ b/docs/reference/esql/functions/parameters/repeat.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +String expression. + +`number`:: +Number times to repeat. diff --git a/docs/reference/esql/functions/signature/mv_append.svg b/docs/reference/esql/functions/signature/mv_append.svg new file mode 100644 index 0000000000000..0f45435425c65 --- /dev/null +++ b/docs/reference/esql/functions/signature/mv_append.svg @@ -0,0 +1 @@ +MV_APPEND(field1,field2) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/repeat.svg b/docs/reference/esql/functions/signature/repeat.svg new file mode 100644 index 0000000000000..591c20d567845 --- /dev/null +++ b/docs/reference/esql/functions/signature/repeat.svg @@ -0,0 +1 @@ +REPEAT(string,number) \ No newline at end of file diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index bfe7f1f0fecd8..d4b120ad1c45b 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -15,6 +15,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -34,6 +35,7 @@ include::layout/left.asciidoc[] include::layout/length.asciidoc[] include::layout/locate.asciidoc[] include::layout/ltrim.asciidoc[] +include::layout/repeat.asciidoc[] include::layout/replace.asciidoc[] include::layout/right.asciidoc[] include::layout/rtrim.asciidoc[] diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index a5d8f85aa564e..841d836f6837e 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -21,4 +21,5 @@ long | long | long long | | long text | text | text text | | text +version | version | version |=== diff --git a/docs/reference/esql/functions/types/mv_append.asciidoc b/docs/reference/esql/functions/types/mv_append.asciidoc new file mode 100644 index 0000000000000..49dcef6dc8860 --- /dev/null +++ b/docs/reference/esql/functions/types/mv_append.asciidoc @@ -0,0 +1,21 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field1 | field2 | result +boolean | boolean | boolean +cartesian_point | cartesian_point | cartesian_point +cartesian_shape | cartesian_shape | cartesian_shape +datetime | datetime | datetime +double | double | double +geo_point | geo_point | geo_point +geo_shape | geo_shape | geo_shape +integer | integer | integer +ip | ip | ip +keyword | keyword | keyword +long | long | long +text | text | text +version | version | version +|=== diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 705745d76dbab..a6b78f781f17a 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -6,8 +6,12 @@ |=== field | result boolean | boolean +cartesian_point | cartesian_point +cartesian_shape | cartesian_shape datetime | datetime double | double +geo_point | geo_point +geo_shape | geo_shape integer | integer ip | ip keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_zip.asciidoc b/docs/reference/esql/functions/types/mv_zip.asciidoc index 514041202a1d5..5e3e1b57d6a55 100644 --- a/docs/reference/esql/functions/types/mv_zip.asciidoc +++ b/docs/reference/esql/functions/types/mv_zip.asciidoc @@ -6,5 +6,15 @@ |=== string1 | string2 | delim | result keyword | keyword | keyword | keyword +keyword | keyword | text | keyword +keyword | keyword | | keyword +keyword | text | keyword | keyword +keyword | text | text | keyword +keyword | text | | keyword +text | keyword | keyword | keyword +text | keyword | text | keyword +text | keyword | | keyword +text | text | keyword | keyword text | text | text | keyword +text | text | | keyword |=== diff --git a/docs/reference/esql/functions/types/repeat.asciidoc b/docs/reference/esql/functions/types/repeat.asciidoc new file mode 100644 index 0000000000000..49e4e80094d7b --- /dev/null +++ b/docs/reference/esql/functions/types/repeat.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | number | result +keyword | integer | keyword +text | integer | keyword +|=== diff --git a/docs/reference/esql/processing-commands/lookup.asciidoc b/docs/reference/esql/processing-commands/lookup.asciidoc new file mode 100644 index 0000000000000..5507c6cc29d1b --- /dev/null +++ b/docs/reference/esql/processing-commands/lookup.asciidoc @@ -0,0 +1,65 @@ +[discrete] +[[esql-lookup]] +=== `LOOKUP` + +experimental::["LOOKUP is highly experimental and only available in SNAPSHOT versions."] + +**Syntax** + +[source,esql] +---- +LOOKUP table ON match_field1[, match_field2, ...] +---- + +*Parameters* + +`table`:: +The name of the `table` provided in the request to match. + +`match_field`:: +The fields in the input to match against the table. + +*Description* + +`LOOKUP` matches values from the input against a `table` provided in the request, +adding the other fields from the `table` to the output. + +*Examples* + +// tag::examples[] +[source,console] +---- +POST /_query?format=txt +{ + "query": """ + FROM library + | SORT page_count DESC + | KEEP name, author + | LOOKUP era ON author + | LIMIT 5 + """, + "tables": { + "era": { + "author:keyword": ["Frank Herbert", "Peter F. Hamilton", "Vernor Vinge", "Alastair Reynolds", "James S.A. 
Corey"], + "era:keyword" : [ "The New Wave", "Diamond", "Diamond", "Diamond", "Hadron"] + } + } +} +---- +// TEST[setup:library] + +Which returns: + +[source,text] +---- + name | author | era +--------------------+-----------------+--------------- +Pandora's Star |Peter F. Hamilton|Diamond +A Fire Upon the Deep|Vernor Vinge |Diamond +Dune |Frank Herbert |The New Wave +Revelation Space |Alastair Reynolds|Diamond +Leviathan Wakes |James S.A. Corey |Hadron +---- +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] +// TESTRESPONSE[non_json] +// end::examples[] diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index e826956440497..40b4ff4bb9dc8 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -304,7 +304,7 @@ are ignored for this index. [[index-max-regex-length]] `index.max_regex_length`:: - The maximum length of regex that can be used in Regexp Query. + The maximum length of value that can be used in `regexp` or `prefix` query. Defaults to `1000`. [[index-query-default-field]] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 354cee3f6a990..5060e47447f03 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -7,21 +7,17 @@ experimental[] Creates an {infer} endpoint to perform an {infer} task. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure -OpenAI, Google AI Studio or Hugging Face. For built-in models and models -uploaded though Eland, the {infer} APIs offer an alternative way to use and -manage trained models. However, if you do not plan to use the {infer} APIs to -use these models or if you want to use non-NLP models, use the +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio or Hugging Face. +For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. - [discrete] [[put-inference-api-request]] ==== {api-request-title} `PUT /_inference//` - [discrete] [[put-inference-api-prereqs]] ==== {api-prereq-title} @@ -29,7 +25,6 @@ use these models or if you want to use non-NLP models, use the * Requires the `manage_inference` <> (the built-in `inference_admin` role grants this privilege) - [discrete] [[put-inference-api-desc]] ==== {api-description-title} @@ -46,27 +41,26 @@ The following services are available through the {infer} API: * ELSER * Google AI Studio * Hugging Face +* Mistral * OpenAI - [discrete] [[put-inference-api-path-params]] ==== {api-path-parms-title} - ``:: (Required, string) The unique identifier of the {infer} endpoint. ``:: (Required, string) -The type of the {infer} task that the model will perform. Available task types: +The type of the {infer} task that the model will perform. +Available task types: * `completion`, * `rerank`, * `sparse_embedding`, * `text_embedding`. - [discrete] [[put-inference-api-request-body]] ==== {api-request-body-title} @@ -78,21 +72,19 @@ Available services: * `azureopenai`: specify the `completion` or `text_embedding` task type to use the Azure OpenAI service. * `azureaistudio`: specify the `completion` or `text_embedding` task type to use the Azure AI Studio service. 
-* `cohere`: specify the `completion`, `text_embedding` or the `rerank` task type to use the -Cohere service. -* `elasticsearch`: specify the `text_embedding` task type to use the E5 -built-in model or text embedding models uploaded by Eland. +* `cohere`: specify the `completion`, `text_embedding` or the `rerank` task type to use the Cohere service. +* `elasticsearch`: specify the `text_embedding` task type to use the E5 built-in model or text embedding models uploaded by Eland. * `elser`: specify the `sparse_embedding` task type to use the ELSER service. -* `googleaistudio`: specify the `completion` task to use the Google AI Studio service. -* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face -service. -* `openai`: specify the `completion` or `text_embedding` task type to use the -OpenAI service. +* `googleaistudio`: specify the `completion` or `text_embedding` task type to use the Google AI Studio service. +* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face service. +* `mistral`: specify the `text_embedding` task type to use the Mistral service. +* `openai`: specify the `completion` or `text_embedding` task type to use the OpenAI service. `service_settings`:: (Required, object) -Settings used to install the {infer} model. These settings are specific to the +Settings used to install the {infer} model. +These settings are specific to the `service` you specified. + .`service_settings` for the `azureaistudio` service [%collapsible%closed] ===== `api_key`::: (Required, string) A valid API key of your Azure AI Studio model deployment. This key can be found on the overview page for your deployment in the management section of your https://ai.azure.com/[Azure AI Studio] account. -IMPORTANT: You need to provide the API key only once, during the {infer} model -creation. The <> does not retrieve your API key. After -creating the {infer} model, you cannot change the associated API key. If you -want to use a different API key, delete the {infer} model and recreate it with -the same name and the updated API key. +IMPORTANT: You need to provide the API key only once, during the {infer} model creation. +The <> does not retrieve your API key. +After creating the {infer} model, you cannot change the associated API key. +If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `target`::: (Required, string) @@ -142,11 +133,13 @@ For "real-time" endpoints which are billed per hour of usage, specify `realtime` By default, the `azureaistudio` service sets the number of requests allowed per minute to `240`. This helps to minimize the number of rate limit errors returned from Azure AI Studio. To modify this, set the `requests_per_minute` setting of this object in your service settings: -``` ++ +[source,text] +---- "rate_limit": { "requests_per_minute": <> } -``` +---- ===== + .`service_settings` for the `azureopenai` service @@ -181,6 +174,22 @@ Your Azure OpenAI deployments can be found though the https://oai.azure.com/[Azu The Azure API version ID to use. We recommend using the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#embeddings[latest supported non-preview version]. +`rate_limit`::: +(Optional, object) +The `azureopenai` service sets a default number of requests allowed per minute depending on the task type. +For `text_embedding` it is set to `1440`. +For `completion` it is set to `120`. 
+This helps to minimize the number of rate limit errors returned from Azure. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- ++ +More information about the rate limits for Azure can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits[Quota limits docs] and https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest[How to change the quotas]. ===== + .`service_settings` for the `cohere` service @@ -188,24 +197,24 @@ We recommend using the https://learn.microsoft.com/en-us/azure/ai-services/opena ===== `api_key`::: (Required, string) -A valid API key of your Cohere account. You can find your Cohere API keys or you -can create a new one +A valid API key of your Cohere account. +You can find your Cohere API keys or you can create a new one https://dashboard.cohere.com/api-keys[on the API keys settings page]. -IMPORTANT: You need to provide the API key only once, during the {infer} model -creation. The <> does not retrieve your API key. After -creating the {infer} model, you cannot change the associated API key. If you -want to use a different API key, delete the {infer} model and recreate it with -the same name and the updated API key. +IMPORTANT: You need to provide the API key only once, during the {infer} model creation. +The <> does not retrieve your API key. +After creating the {infer} model, you cannot change the associated API key. +If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `embedding_type`:: (Optional, string) -Only for `text_embedding`. Specifies the types of embeddings you want to get -back. Defaults to `float`. +Only for `text_embedding`. +Specifies the types of embeddings you want to get back. +Defaults to `float`. Valid values are: - * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). - * `float`: use it for the default float embeddings. - * `int8`: use it for signed int8 embeddings. +* `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). +* `float`: use it for the default float embeddings. +* `int8`: use it for signed int8 embeddings. `model_id`:: (Optional, string) @@ -214,50 +223,68 @@ To review the available `rerank` models, refer to the https://docs.cohere.com/reference/rerank-1[Cohere docs]. To review the available `text_embedding` models, refer to the -https://docs.cohere.com/reference/embed[Cohere docs]. The default value for +https://docs.cohere.com/reference/embed[Cohere docs]. +The default value for `text_embedding` is `embed-english-v2.0`. + +`rate_limit`::: +(Optional, object) +By default, the `cohere` service sets the number of requests allowed per minute to `10000`. +This value is the same for all task types. +This helps to minimize the number of rate limit errors returned from Cohere. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- ++ +More information about Cohere's rate limits can be found in https://docs.cohere.com/docs/going-live#production-key-specifications[Cohere's production key docs]. + ===== + .`service_settings` for the `elasticsearch` service [%collapsible%closed] ===== + `model_id`::: (Required, string) -The name of the model to use for the {infer} task. 
It can be the -ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or -a text embedding model already +The name of the model to use for the {infer} task. +It can be the ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or a text embedding model already {ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland]. `num_allocations`::: (Required, integer) -The number of model allocations to create. `num_allocations` must not exceed the -number of available processors per node divided by the `num_threads`. +The number of model allocations to create. `num_allocations` must not exceed the number of available processors per node divided by `num_threads`. `num_threads`::: (Required, integer) -The number of threads to use by each model allocation. `num_threads` must not -exceed the number of available processors per node divided by the number of -allocations. Must be a power of 2. Max allowed value is 32. +The number of threads used by each model allocation. `num_threads` must not exceed the number of available processors per node divided by the number of allocations. +Must be a power of 2. Max allowed value is 32. + ===== + .`service_settings` for the `elser` service [%collapsible%closed] ===== + `num_allocations`::: (Required, integer) -The number of model allocations to create. `num_allocations` must not exceed the -number of available processors per node divided by the `num_threads`. +The number of model allocations to create. `num_allocations` must not exceed the number of available processors per node divided by `num_threads`. `num_threads`::: (Required, integer) -The number of threads to use by each model allocation. `num_threads` must not -exceed the number of available processors per node divided by the number of -allocations. Must be a power of 2. Max allowed value is 32. +The number of threads used by each model allocation. `num_threads` must not exceed the number of available processors per node divided by the number of allocations. +Must be a power of 2. Max allowed value is 32. + ===== + .`service_settings` for the `googleiastudio` service [%collapsible%closed] ===== + `api_key`::: (Required, string) A valid API key for the Google Gemini API. @@ -274,76 +301,148 @@ This helps to minimize the number of rate limit errors returned from Google AI S To modify this, set the `requests_per_minute` setting of this object in your service settings: + -- -``` +[source,text] +---- "rate_limit": { "requests_per_minute": <> } -``` +---- -- + ===== + .`service_settings` for the `hugging_face` service [%collapsible%closed] ===== + `api_key`::: (Required, string) -A valid access token of your Hugging Face account. You can find your Hugging -Face access tokens or you can create a new one +A valid access token of your Hugging Face account. +You can find your Hugging Face access tokens or you can create a new one https://huggingface.co/settings/tokens[on the settings page]. -IMPORTANT: You need to provide the API key only once, during the {infer} model -creation. The <> does not retrieve your API key. After -creating the {infer} model, you cannot change the associated API key. +IMPORTANT: You need to provide the API key only once, during the {infer} model creation. +The <> does not retrieve your API key. +After creating the {infer} model, you cannot change the associated API key. 
+If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `url`::: (Required, string) The URL endpoint to use for the requests. + +`rate_limit`::: +(Optional, object) +By default, the `hugging_face` service sets the number of requests allowed per minute to `3000`. +This helps to minimize the number of rate limit errors returned from Hugging Face. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- + +===== ++ +.`service_settings` for the `mistral` service +[%collapsible%closed] +===== + +`api_key`::: +(Required, string) +A valid API key for your Mistral account. +You can find your Mistral API keys or you can create a new one +https://console.mistral.ai/api-keys/[on the API Keys page]. + +`model`::: +(Required, string) +The name of the model to use for the {infer} task. +Refer to the https://docs.mistral.ai/getting-started/models/[Mistral models documentation] +for the list of available text embedding models. + +`max_input_tokens`::: +(Optional, integer) +Allows you to specify the maximum number of tokens per input before chunking occurs. + +`rate_limit`::: +(Optional, object) +By default, the `mistral` service sets the number of requests allowed per minute to `240`. +This helps to minimize the number of rate limit errors returned from the Mistral API. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- + ===== + .`service_settings` for the `openai` service [%collapsible%closed] ===== + `api_key`::: (Required, string) -A valid API key of your OpenAI account. You can find your OpenAI API keys in -your OpenAI account under the +A valid API key of your OpenAI account. +You can find your OpenAI API keys in your OpenAI account under the https://platform.openai.com/api-keys[API keys section]. -IMPORTANT: You need to provide the API key only once, during the {infer} model -creation. The <> does not retrieve your API key. After -creating the {infer} model, you cannot change the associated API key. If you -want to use a different API key, delete the {infer} model and recreate it with -the same name and the updated API key. +IMPORTANT: You need to provide the API key only once, during the {infer} model creation. +The <> does not retrieve your API key. +After creating the {infer} model, you cannot change the associated API key. +If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `model_id`::: (Required, string) -The name of the model to use for the {infer} task. Refer to the +The name of the model to use for the {infer} task. +Refer to the https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] for the list of available text embedding models. `organization_id`::: (Optional, string) -The unique identifier of your organization. You can find the Organization ID in -your OpenAI account under +The unique identifier of your organization. +You can find the Organization ID in your OpenAI account under https://platform.openai.com/account/organization[**Settings** > **Organizations**]. `url`::: (Optional, string) -The URL endpoint to use for the requests. Can be changed for testing purposes. +The URL endpoint to use for the requests. +Can be changed for testing purposes. 
Defaults to `https://api.openai.com/v1/embeddings`. +`rate_limit`::: +(Optional, object) +The `openai` service sets a default number of requests allowed per minute depending on the task type. +For `text_embedding` it is set to `3000`. +For `completion` it is set to `500`. +This helps to minimize the number of rate limit errors returned from OpenAI. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- ++ +More information about the rate limits for OpenAI can be found in your https://platform.openai.com/account/limits[Account limits]. + ===== `task_settings`:: (Optional, object) -Settings to configure the {infer} task. These settings are specific to the +Settings to configure the {infer} task. +These settings are specific to the `` you specified. + .`task_settings` for the `completion` task type [%collapsible%closed] ===== + `do_sample`::: (Optional, float) For the `azureaistudio` service only. @@ -358,8 +457,8 @@ Defaults to 64. `user`::: (Optional, string) -For `openai` service only. Specifies the user issuing the request, which can be -used for abuse detection. +For `openai` service only. +Specifies the user issuing the request, which can be used for abuse detection. `temperature`::: (Optional, float) @@ -378,45 +477,46 @@ Should not be used if `temperature` is specified. .`task_settings` for the `rerank` task type [%collapsible%closed] ===== + `return_documents`:: (Optional, boolean) -For `cohere` service only. Specify whether to return doc text within the -results. +For `cohere` service only. +Specify whether to return doc text within the results. `top_n`:: (Optional, integer) -The number of most relevant documents to return, defaults to the number of the -documents. +The number of most relevant documents to return, defaults to the number of the documents. + ===== + .`task_settings` for the `text_embedding` task type [%collapsible%closed] ===== + `input_type`::: (Optional, string) -For `cohere` service only. Specifies the type of input passed to the model. +For `cohere` service only. +Specifies the type of input passed to the model. Valid values are: - * `classification`: use it for embeddings passed through a text classifier. - * `clusterning`: use it for the embeddings run through a clustering algorithm. - * `ingest`: use it for storing document embeddings in a vector database. - * `search`: use it for storing embeddings of search queries run against a - vector database to find relevant documents. +* `classification`: use it for embeddings passed through a text classifier. +* `clustering`: use it for the embeddings run through a clustering algorithm. +* `ingest`: use it for storing document embeddings in a vector database. +* `search`: use it for storing embeddings of search queries run against a vector database to find relevant documents. `truncate`::: (Optional, string) -For `cohere` service only. Specifies how the API handles inputs longer than the -maximum token length. Defaults to `END`. 
+Valid values are: +* `NONE`: when the input exceeds the maximum input token length an error is returned. +* `START`: when the input exceeds the maximum input token length the start of the input is discarded. +* `END`: when the input exceeds the maximum input token length the end of the input is discarded. `user`::: (optional, string) -For `openai`, `azureopenai` and `azureaistudio` services only. Specifies the user issuing the -request, which can be used for abuse detection. +For `openai`, `azureopenai` and `azureaistudio` services only. +Specifies the user issuing the request, which can be used for abuse detection. ===== [discrete] @@ -470,7 +570,6 @@ PUT _inference/completion/azure_ai_studio_completion The list of chat completion models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=chat-completion[Azure AI Studio model explorer]. - [discrete] [[inference-example-azureopenai]] ===== Azure OpenAI service @@ -519,7 +618,6 @@ The list of chat completion models that you can choose from in your Azure OpenAI * https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-models[GPT-4 and GPT-4 Turbo models] * https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-35[GPT-3.5] - [discrete] [[inference-example-cohere]] ===== Cohere service @@ -565,7 +663,6 @@ PUT _inference/rerank/cohere-rerank For more examples, also review the https://docs.cohere.com/docs/elasticsearch-and-cohere#rerank-search-results-with-cohere-and-elasticsearch[Cohere documentation]. - [discrete] [[inference-example-e5]] ===== E5 via the `elasticsearch` service @@ -586,10 +683,9 @@ PUT _inference/text_embedding/my-e5-model } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The `model_id` must be the ID of one of the built-in E5 models. Valid values -are `.multilingual-e5-small` and `.multilingual-e5-small_linux-x86_64`. For -further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. - +<1> The `model_id` must be the ID of one of the built-in E5 models. +Valid values are `.multilingual-e5-small` and `.multilingual-e5-small_linux-x86_64`. +For further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. [discrete] [[inference-example-elser]] @@ -597,8 +693,7 @@ further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. -Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more -info. +Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info. [source,console] ------------------------------------------------------------ @@ -672,16 +767,17 @@ PUT _inference/text_embedding/hugging-face-embeddings } ------------------------------------------------------------ // TEST[skip:TBD] -<1> A valid Hugging Face access token. You can find on the +<1> A valid Hugging Face access token. +You can find on the https://huggingface.co/settings/tokens[settings page of your account]. <2> The {infer} endpoint URL you created on Hugging Face. Create a new {infer} endpoint on -https://ui.endpoints.huggingface.co/[the Hugging Face endpoint page] to get an -endpoint URL. Select the model you want to use on the new endpoint creation page -- for example `intfloat/e5-small-v2` - then select the `Sentence Embeddings` -task under the Advanced configuration section. 
Create the endpoint. Copy the URL -after the endpoint initialization has been finished. +https://ui.endpoints.huggingface.co/[the Hugging Face endpoint page] to get an endpoint URL. +Select the model you want to use on the new endpoint creation page - for example `intfloat/e5-small-v2` - then select the `Sentence Embeddings` +task under the Advanced configuration section. +Create the endpoint. +Copy the URL after the endpoint initialization has been finished. [discrete] [[inference-example-hugging-face-supported-models]] @@ -695,7 +791,6 @@ The list of recommended models for the Hugging Face service: * https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base] * https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small] - [discrete] [[inference-example-eland]] ===== Models uploaded by Eland via the elasticsearch service @@ -716,10 +811,30 @@ PUT _inference/text_embedding/my-msmarco-minilm-model } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The `model_id` must be the ID of a text embedding model which has already -been +<1> The `model_id` must be the ID of a text embedding model which has already been {ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland]. +[discrete] +[[inference-example-mistral]] +===== Mistral service + +The following example shows how to create an {infer} endpoint called +`mistral-embeddings-test` to perform a `text_embedding` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/mistral-embeddings-test +{ + "service": "mistral", + "service_settings": { + "api_key": "", + "model": "mistral-embed" <1> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The `model` must be the ID of a text embedding model which can be found in the +https://docs.mistral.ai/getting-started/models/[Mistral models documentation]. [discrete] [[inference-example-openai]] @@ -756,4 +871,3 @@ PUT _inference/completion/openai-completion } ------------------------------------------------------------ // TEST[skip:TBD] - diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index e85165a28dcb4..738ac234d6162 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -72,6 +72,7 @@ depend on what has been found and which properties were configured in `propertie `residential_proxy`, `domain`, `isp`, `isp_organization`, `mobile_country_code`, `mobile_network_code`, `user_type`, and `connection_type`. The fields actually added depend on what has been found and which properties were configured in `properties`. +preview::["Do not use the GeoIP2 Anonymous IP, GeoIP2 Connection Type, GeoIP2 Domain, GeoIP2 ISP, and GeoIP2 Enterprise databases in production environments. This functionality is in technical preview and may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] Here is an example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: diff --git a/docs/reference/migration/migrate_8_14.asciidoc b/docs/reference/migration/migrate_8_14.asciidoc index bdff8bef246b5..2e6cd439ebed0 100644 --- a/docs/reference/migration/migrate_8_14.asciidoc +++ b/docs/reference/migration/migrate_8_14.asciidoc @@ -21,8 +21,25 @@ and prevent them from operating normally. Before upgrading to 8.14, review these changes and take the described steps to mitigate the impact. + +There are no notable breaking changes in {es} 8.14. +But there are some less critical breaking changes. + [discrete] -[[breaking-changes-8.14-0]] +[[breaking_814_rest_api_changes]] +==== REST API changes + +[[prevent_dls_fls_if_replication_assigned]] +.Prevent DLS/FLS if `replication` is assigned +[%collapsible] +==== +*Details* + +For cross-cluster API keys, {es} no longer allows specifying document-level security (DLS) or field-level security (FLS) in the `search` field, if `replication` is also specified. {es} likewise blocks the use of any existing cross-cluster API keys that meet this condition. + +*Impact* + +Remove any document-level security (DLS) or field-level security (FLS) definitions from the `search` field for cross-cluster API keys that also have a `replication` field, or create two separate cross-cluster API keys, one for search and one for replication. +==== + [discrete] [[breaking_814_dls_changes]] @@ -41,3 +58,33 @@ When Document Level Security (DLS) is applied to the validate query API with the *Impact* + If needed, test workflows with DLS enabled to ensure that the stricter security rules do not impact your application. ==== + + +[discrete] +[[deprecated-8.14]] +=== Deprecations + +The following functionality has been deprecated in {es} 8.14 +and will be removed in a future version. +While this won't have an immediate impact on your applications, +we strongly encourage you to take the described steps to update your code +after upgrading to 8.14. + +To find out if you are using any deprecated functionality, +enable <>. + +[discrete] +[[deprecations_814_mapping]] +==== Mapping deprecations + +[[deprecate_allowing_fields_in_scenarios_where_it_ignored]] +.Deprecate allowing `fields` in scenarios where it is ignored +[%collapsible] +==== +*Details* + +The following mapped types have always ignored `fields` when using multi-fields. This deprecation makes this clearer and we will completely disallow `fields` for these mapped types in the future. + +*Impact* + +In the future, `join`, `aggregate_metric_double`, and `constant_keyword`, will all disallow supplying `fields` as a parameter in the mapping. +==== + diff --git a/docs/reference/modules/network/threading.asciidoc b/docs/reference/modules/network/threading.asciidoc index abf00b521b5cc..832ffc0c1588f 100644 --- a/docs/reference/modules/network/threading.asciidoc +++ b/docs/reference/modules/network/threading.asciidoc @@ -109,10 +109,49 @@ the `transport_worker` threads are too busy. It is more reliable to use profiling trace. These tools are independent of any work the JVM is performing. It may also be possible to identify some reasons for delays from the server -logs, particularly looking at warnings from -`org.elasticsearch.transport.InboundHandler` and -`org.elasticsearch.transport.OutboundHandler`. 
Warnings about long processing -times from the `InboundHandler` are particularly indicative of incorrect -threading behaviour, whereas the transmission time reported by the -`OutboundHandler` includes time spent waiting for network congestion and the -`transport_worker` thread is free to do other work during this time. +logs. See for instance the following loggers: + +`org.elasticsearch.transport.InboundHandler`:: This logger reports a warning if +processing an inbound message occupies a network thread for unreasonably long, +which is almost certainly a bug. The warning includes some information which +can be used to identify the message that took unreasonably long to process. + +`org.elasticsearch.transport.OutboundHandler`:: This logger reports a warning +if sending an outbound message takes longer than expected. This duration +includes time spent waiting for network congestion to clear, and time spent +processing other work on the same network thread, so does not always indicate +the presence of a bug related to the outbound message specified in the log +entry. + +`org.elasticsearch.common.network.ThreadWatchdog`:: This logger reports a +warning and a thread dump when it notices that a network thread has not made +progress between two consecutive checks, which is almost certainly a bug: ++ +-- +[source,text] +---- +[WARN ][o.e.c.n.ThreadWatchdog ] the following threads are active but did not make progress in the preceding [5s]: [elasticsearch[instance-0000000004][transport_worker][T#1]]] +[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress [part 1]: H4sIAAAAAAAA/+1aa2/bOBb93l8hYLUYFWgYvWw5AQbYpEkn6STZbJyiwAwGA1qiY8US6ZJUHvPr90qk/JJky41TtDMuUIci... +[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress [part 2]: LfXL/x70a3eL8ve6Ral74ZBrp5x7HmUD9KXQz1MaXUNfFC6SeEysxSw1cNXL9JXYl3AigAE7ywbm/AZ+ll3Ox4qXJHNjVr6h... +[WARN ][o.e.c.n.ThreadWatchdog ] hot threads dump due to active threads not making progress (gzip compressed, base64-encoded, and split into 2 parts on preceding log lines; ... +---- + +To reconstruct the thread dump, base64-decode the data and decompress it using `gzip`. For instance, on Unix-like systems: + +[source,sh] +---- +cat watchdog.log | sed -e 's/.*://' | base64 --decode | gzip --decompress +---- + +This mechanism can be controlled with the following settings: + +`network.thread.watchdog.interval`::: +(<>, <>) +Defines the interval between watchdog checks. Defaults to `5s`. Set to `0` to +disable the network thread watchdog. + +`network.thread.watchdog.quiet_time`::: +(<>, <>) +Defines the interval between watchdog warnings. Defaults to `10m`. + +-- diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc index ccd94f0ecb77a..23bcb4a52ef38 100644 --- a/docs/reference/query-dsl/semantic-query.asciidoc +++ b/docs/reference/query-dsl/semantic-query.asciidoc @@ -100,9 +100,13 @@ GET my-index/_search } }, { - "semantic": { - "field": "semantic_field", - "query": "shoes" + "standard": { + "query": { + "semantic": { + "field": "semantic_field", + "query": "shoes" + } + } } } ], diff --git a/docs/reference/release-notes/8.14.0.asciidoc b/docs/reference/release-notes/8.14.0.asciidoc index a203c983927cd..42f2f86a123ed 100644 --- a/docs/reference/release-notes/8.14.0.asciidoc +++ b/docs/reference/release-notes/8.14.0.asciidoc @@ -1,8 +1,350 @@ [[release-notes-8.14.0]] == {es} version 8.14.0 -coming[8.14.0] - Also see <>. 
+[[breaking-8.14.0]] +[float] +=== Breaking changes + +Security:: +* Prevent DLS/FLS if `replication` is assigned {es-pull}108600[#108600] +* Apply stricter Document Level Security (DLS) rules for the validate query API with the rewrite parameter {es-pull}105709[#105709] +* Apply stricter Document Level Security (DLS) rules for terms aggregations when min_doc_count is set to 0 {es-pull}105714[#105714] + +[[bug-8.14.0]] +[float] +=== Bug fixes + +Aggregations:: +* Cross check livedocs for terms aggs when index access control list is non-null {es-pull}105714[#105714] +* ESQL: Enable VALUES agg for datetime {es-pull}107016[#107016] +* Fix IOOBE in TTest aggregation when using filters {es-pull}109034[#109034] +* Validate stats formatting in standard `InternalStats` constructor {es-pull}107678[#107678] (issue: {es-issue}107671[#107671]) + +Application:: +* [Bugfix] Connector API - fix status serialisation issue in termquery {es-pull}108365[#108365] +* [Connector API] Fix bug with filtering validation toXContent {es-pull}107467[#107467] +* [Connector API] Fix bug with parsing *_doc_count nullable fields {es-pull}108854[#108854] +* [Connector API] Fix bug with with wrong target index for access control sync {es-pull}109097[#109097] + +Authorization:: +* Users with monitor privileges can access async_search/status endpoint even when setting keep_alive {es-pull}107383[#107383] + +CAT APIs:: +* Fix numeric sorts in `_cat/nodes` {es-pull}106189[#106189] (issue: {es-issue}48070[#48070]) + +CCR:: +* Add ?master_timeout query parameter to ccr apis {es-pull}105168[#105168] + +CRUD:: +* Fix `noop_update_total` is not being updated when using the `_bulk` {es-pull}105745[#105745] (issue: {es-issue}105742[#105742]) +* Use correct system index bulk executor {es-pull}106150[#106150] + +Cluster Coordination:: +* Fix support for infinite `?master_timeout` {es-pull}107050[#107050] + +Data streams:: +* Add non-indexed fields to ecs templates {es-pull}106714[#106714] +* Fix bulk NPE when retrying failure redirect after cluster block {es-pull}107598[#107598] +* Improve error message when rolling over DS alias {es-pull}106708[#106708] (issue: {es-issue}106137[#106137]) +* Only skip deleting a downsampled index if downsampling is in progress as part of DSL retention {es-pull}109020[#109020] + +Downsampling:: +* Fix downsample action request serialization {es-pull}106919[#106919] (issue: {es-issue}106917[#106917]) + +EQL:: +* Use #addWithoutBreaking when adding a negative number of bytes to the circuit breaker in `SequenceMatcher` {es-pull}107655[#107655] + +ES|QL:: +* ESQL: Allow reusing BUCKET grouping expressions in aggs {es-pull}107578[#107578] +* ESQL: Disable quoting in FROM command {es-pull}108431[#108431] +* ESQL: Fix MV_DEDUPE when using data from an index {es-pull}107577[#107577] (issue: {es-issue}104745[#104745]) +* ESQL: Fix error message when failing to resolve aggregate groupings {es-pull}108101[#108101] (issue: {es-issue}108053[#108053]) +* ESQL: Fix treating all fields as MV in COUNT pushdown {es-pull}106720[#106720] +* ESQL: Re-enable logical dependency check {es-pull}105860[#105860] +* ESQL: median, count and `count_distinct` over constants {es-pull}107414[#107414] (issues: {es-issue}105248[#105248], {es-issue}104900[#104900]) +* ES|QL fix no-length substring with supplementary (4-byte) character {es-pull}107183[#107183] +* ES|QL: Fix usage of IN operator with TEXT fields {es-pull}106654[#106654] (issue: {es-issue}105379[#105379]) +* ES|QL: Improve support for TEXT fields in functions 
{es-pull}106810[#106810] +* Fix docs generation of signatures for variadic functions {es-pull}107865[#107865] +* [ESQL] Mark `date_diff` as requiring all three arguments {es-pull}108834[#108834] (issue: {es-issue}108383[#108383]) + +Health:: +* Don't stop checking if the `HealthNode` persistent task is present {es-pull}105449[#105449] (issue: {es-issue}98926[#98926]) +* Health monitor concurrency fixes {es-pull}105674[#105674] (issue: {es-issue}105065[#105065]) + +Highlighting:: +* Check preTags and postTags params for empty values {es-pull}106396[#106396] (issue: {es-issue}69009[#69009]) +* added fix for inconsistent text trimming in Unified Highlighter {es-pull}99961[#99961] (issue: {es-issue}101803[#101803]) + +Infra/CLI:: +* Workaround G1 bug for JDK 22 and 22.0.1 {es-pull}108571[#108571] + +Infra/Core:: +* Add a check for the same feature being declared regular and historical {es-pull}106285[#106285] +* Fix `AffixSetting.exists` to include secure settings {es-pull}106745[#106745] +* Fix regression in get index settings (human=true) where the version was not displayed in human-readable format {es-pull}107447[#107447] +* Nativeaccess: try to load all located libsystemds {es-pull}108238[#108238] (issue: {es-issue}107878[#107878]) +* Update several references to `IndexVersion.toString` to use `toReleaseVersion` {es-pull}107828[#107828] (issue: {es-issue}107821[#107821]) +* Update several references to `TransportVersion.toString` to use `toReleaseVersion` {es-pull}107902[#107902] + +Infra/Logging:: +* Log when update AffixSetting using addAffixMapUpdateConsumer {es-pull}97072[#97072] + +Infra/Node Lifecycle:: +* Consider `ShardRouting` roles when calculating shard copies in shutdown status {es-pull}106063[#106063] +* Wait indefintely for http connections on shutdown by default {es-pull}106511[#106511] + +Infra/Scripting:: +* Guard against a null scorer in painless execute {es-pull}109048[#109048] (issue: {es-issue}43541[#43541]) +* Painless: Apply true regex limit factor with FIND and MATCH operation {es-pull}105670[#105670] + +Ingest Node:: +* Catching `StackOverflowErrors` from bad regexes in `GsubProcessor` {es-pull}106851[#106851] +* Fix `uri_parts` processor behaviour for missing extensions {es-pull}105689[#105689] (issue: {es-issue}105612[#105612]) +* Remove leading is_ prefix from Enterprise geoip docs {es-pull}108518[#108518] +* Slightly better geoip `databaseType` validation {es-pull}106889[#106889] + +License:: +* Fix lingering license warning header {es-pull}108031[#108031] (issue: {es-issue}107573[#107573]) + +Machine Learning:: +* Fix NPE in ML assignment notifier {es-pull}107312[#107312] +* Fix `startOffset` must be non-negative error in XLMRoBERTa tokenizer {es-pull}107891[#107891] (issue: {es-issue}104626[#104626]) +* Fix the position of spike, dip and distribution changes bucket when the sibling aggregation includes empty buckets {es-pull}106472[#106472] +* Make OpenAI embeddings parser more flexible {es-pull}106808[#106808] + +Mapping:: +* Dedupe terms in terms queries {es-pull}106381[#106381] +* Extend support of `allowedFields` to `getMatchingFieldNames` and `getAllFields` {es-pull}106862[#106862] +* Fix for raw mapping merge of fields named "properties" {es-pull}108867[#108867] (issue: {es-issue}108866[#108866]) +* Handle infinity during synthetic source construction for scaled float field {es-pull}107494[#107494] (issue: {es-issue}107101[#107101]) +* Handle pass-through subfields with deep nesting {es-pull}106767[#106767] +* Wrap "Pattern too complex" exception into 
an `IllegalArgumentException` {es-pull}109173[#109173] + +Network:: +* Fix HTTP corner-case response leaks {es-pull}105617[#105617] + +Search:: +* Add `internalClusterTest` for and fix leak in `ExpandSearchPhase` {es-pull}108562[#108562] (issue: {es-issue}108369[#108369]) +* Avoid attempting to load the same empty field twice in fetch phase {es-pull}107551[#107551] +* Bugfix: Disable eager loading `BitSetFilterCache` on Indexing Nodes {es-pull}105791[#105791] +* Cross-cluster painless/execute actions should check permissions only on target remote cluster {es-pull}105360[#105360] +* Fix error 500 on invalid `ParentIdQuery` {es-pull}105693[#105693] (issue: {es-issue}105366[#105366]) +* Fix range queries for float/half_float fields when bounds are out of type's range {es-pull}106691[#106691] +* Fixing NPE when requesting [_none_] for `stored_fields` {es-pull}104711[#104711] +* Fork when handling remote field-caps responses {es-pull}107370[#107370] +* Handle parallel calls to `createWeight` when profiling is on {es-pull}108041[#108041] (issues: {es-issue}104131[#104131], {es-issue}104235[#104235]) +* Harden field-caps request dispatcher {es-pull}108736[#108736] +* Replace `UnsupportedOperationException` with `IllegalArgumentException` for non-existing columns {es-pull}107038[#107038] +* Unable to retrieve multiple stored field values {es-pull}106575[#106575] +* Validate `model_id` is required when using the `learning_to_rank` rescorer {es-pull}107743[#107743] + +Security:: +* Disable validate when rewrite parameter is sent and the index access control list is non-null {es-pull}105709[#105709] +* Fix field caps and field level security {es-pull}106731[#106731] + +Snapshot/Restore:: +* Fix double-pausing shard snapshot {es-pull}109148[#109148] (issue: {es-issue}109143[#109143]) +* Treat 404 as empty register in `AzureBlobStore` {es-pull}108900[#108900] (issue: {es-issue}108504[#108504]) +* `SharedBlobCacheService.maybeFetchRegion` should use `computeCacheFileRegionSize` {es-pull}106685[#106685] + +TSDB:: +* Flip dynamic mapping condition when create tsid {es-pull}105636[#105636] + +Transform:: +* Consolidate permissions checks {es-pull}106413[#106413] (issue: {es-issue}105794[#105794]) +* Disable PIT for remote clusters {es-pull}107969[#107969] +* Make force-stopping the transform always remove persistent task from cluster state {es-pull}106989[#106989] (issue: {es-issue}106811[#106811]) +* Only trigger action once per thread {es-pull}107232[#107232] (issue: {es-issue}107215[#107215]) +* [Transform] Auto retry Transform start {es-pull}106243[#106243] + +Vector Search:: +* Fix multithreading copies in lib vec {es-pull}108802[#108802] +* [8.14] Fix multithreading copies in lib vec {es-pull}108810[#108810] + +[[deprecation-8.14.0]] +[float] +=== Deprecations + +Mapping:: +* Deprecate allowing `fields` in scenarios where it is ignored {es-pull}106031[#106031] + +[[enhancement-8.14.0]] +[float] +=== Enhancements + +Aggregations:: +* Add a `PriorityQueue` backed by `BigArrays` {es-pull}106361[#106361] +* All new `shard_seed` parameter for `random_sampler` agg {es-pull}104830[#104830] + +Allocation:: +* Add allocation stats {es-pull}105894[#105894] +* Add index forecasts to /_cat/allocation output {es-pull}97561[#97561] + +Application:: +* [Profiling] Add TopN Functions API {es-pull}106860[#106860] +* [Profiling] Allow to override index settings {es-pull}106172[#106172] +* [Profiling] Speed up serialization of flamegraph {es-pull}105779[#105779] + +Authentication:: +* Support Profile Activate with JWTs 
with client authn {es-pull}105439[#105439] (issue: {es-issue}105342[#105342]) + +Authorization:: +* Allow users to get status of own async search tasks {es-pull}106638[#106638] +* [Security Solution] Add `read` permission for third party agent indices for `kibana_system` {es-pull}107046[#107046] + +Data streams:: +* Add data stream lifecycle to kibana reporting template {es-pull}106259[#106259] + +ES|QL:: +* Add ES|QL Locate function {es-pull}106899[#106899] (issue: {es-issue}106818[#106818]) +* Add ES|QL signum function {es-pull}106866[#106866] +* Add status for enrich operator {es-pull}106036[#106036] +* Add two new OGC functions ST_X and ST_Y {es-pull}105768[#105768] +* Adjust array resizing in block builder {es-pull}106934[#106934] +* Bulk loading enrich fields in ESQL {es-pull}106796[#106796] +* ENRICH support for TEXT fields {es-pull}106435[#106435] (issue: {es-issue}105384[#105384]) +* ESQL: Add timers to many status results {es-pull}105421[#105421] +* ESQL: Allow grouping key inside stats expressions {es-pull}106579[#106579] +* ESQL: Introduce expression validation phase {es-pull}105477[#105477] (issue: {es-issue}105425[#105425]) +* ESQL: Log queries at debug level {es-pull}108257[#108257] +* ESQL: Regex improvements {es-pull}106429[#106429] +* ESQL: Sum of constants {es-pull}105454[#105454] +* ESQL: Support ST_DISJOINT {es-pull}107007[#107007] +* ESQL: Support partially folding CASE {es-pull}106094[#106094] +* ESQL: Use faster field caps {es-pull}105067[#105067] +* ESQL: extend BUCKET with spans {es-pull}107272[#107272] +* ESQL: perform a reduction on the data node {es-pull}106516[#106516] +* Expand support for ENRICH to full set supported by ES ingest processors {es-pull}106186[#106186] (issue: {es-issue}106162[#106162]) +* Introduce ordinal bytesref block {es-pull}106852[#106852] (issue: {es-issue}106387[#106387]) +* Leverage ordinals in enrich lookup {es-pull}107449[#107449] +* Serialize big array blocks {es-pull}106373[#106373] +* Serialize big array vectors {es-pull}106327[#106327] +* Specialize serialization for `ArrayVectors` {es-pull}105893[#105893] +* Specialize serialization of array blocks {es-pull}106102[#106102] +* Speed up serialization of `BytesRefArray` {es-pull}106053[#106053] +* Support ST_CONTAINS and ST_WITHIN {es-pull}106503[#106503] +* Support ST_INTERSECTS between geometry column and other geometry or string {es-pull}104907[#104907] (issue: {es-issue}104874[#104874]) + +Engine:: +* Add metric for calculating index flush time excluding waiting on locks {es-pull}107196[#107196] + +Highlighting:: +* Enable 'encoder' and 'tags_schema' highlighting settings at field level {es-pull}107224[#107224] (issue: {es-issue}94028[#94028]) + +ILM+SLM:: +* Add a flag to re-enable writes on the final index after an ILM shrink action. 
{es-pull}107121[#107121] (issue: {es-issue}106599[#106599]) + +Indices APIs:: +* Wait forever for `IndexTemplateRegistry` asset installation {es-pull}105985[#105985] + +Infra/CLI:: +* Enhance search tier GC options {es-pull}106526[#106526] +* Increase KDF iteration count in `KeyStoreWrapper` {es-pull}107107[#107107] + +Infra/Core:: +* Add pluggable `BuildVersion` in `NodeMetadata` {es-pull}105757[#105757] + +Infra/Metrics:: +* Infrastructure for metering the update requests {es-pull}105063[#105063] +* `DocumentParsingObserver` to accept an `indexName` to allow skipping system indices {es-pull}107041[#107041] + +Infra/Scripting:: +* String sha512() painless function {es-pull}99048[#99048] (issue: {es-issue}97691[#97691]) + +Ingest Node:: +* Add support for the 'Anonymous IP' database to the geoip processor {es-pull}107287[#107287] (issue: {es-issue}90789[#90789]) +* Add support for the 'Enterprise' database to the geoip processor {es-pull}107377[#107377] +* Adding `cache_stats` to geoip stats API {es-pull}107334[#107334] +* Support data streams in enrich policy indices {es-pull}107291[#107291] (issue: {es-issue}98836[#98836]) + +Machine Learning:: +* Add GET `_inference` for all inference endpoints {es-pull}107517[#107517] +* Added a timeout parameter to the inference API {es-pull}107242[#107242] +* Enable retrying on 500 error response from Cohere text embedding API {es-pull}105797[#105797] + +Mapping:: +* Make int8_hnsw our default index for new dense-vector fields {es-pull}106836[#106836] + +Ranking:: +* Add retrievers using the parser-only approach {es-pull}105470[#105470] + +Search:: +* Add Lucene spanish plural stemmer {es-pull}106952[#106952] +* Add `modelId` and `modelText` to `KnnVectorQueryBuilder` {es-pull}106068[#106068] +* Add a SIMD (Neon) optimised vector distance function for int8 {es-pull}106133[#106133] +* Add transport version for search load autoscaling {es-pull}106377[#106377] +* CCS with `minimize_roundtrips` performs incremental merges of each `SearchResponse` {es-pull}105781[#105781] +* Track ongoing search tasks {es-pull}107129[#107129] + +Security:: +* Invalidating cross cluster API keys requires `manage_security` {es-pull}107411[#107411] +* Show owner `realm_type` for returned API keys {es-pull}105629[#105629] + +Snapshot/Restore:: +* Add setting for max connections to S3 {es-pull}107533[#107533] +* Distinguish different snapshot failures by log level {es-pull}105622[#105622] + +Stats:: +* (API+) CAT Nodes alias for shard header to match CAT Allocation {es-pull}105847[#105847] +* Add total size in bytes to doc stats {es-pull}106840[#106840] (issue: {es-issue}97670[#97670]) + +TSDB:: +* Improve short-circuiting downsample execution {es-pull}106563[#106563] +* Support non-keyword dimensions as routing fields in TSDB {es-pull}105501[#105501] +* Text fields are stored by default in TSDB indices {es-pull}106338[#106338] (issue: {es-issue}97039[#97039]) + +Transform:: +* Check node shutdown before fail {es-pull}107358[#107358] (issue: {es-issue}100891[#100891]) +* Do not log error on node restart when the transform is already failed {es-pull}106171[#106171] (issue: {es-issue}106168[#106168]) + +[[feature-8.14.0]] +[float] +=== New features + +Application:: +* Allow `typed_keys` for search application Search API {es-pull}108007[#108007] +* [Connector API] Support cleaning up sync jobs when deleting a connector {es-pull}107253[#107253] + +ES|QL:: +* ESQL: Values aggregation function {es-pull}106065[#106065] (issue: {es-issue}103600[#103600]) +* ESQL: allow sorting by 
expressions and not only regular fields {es-pull}107158[#107158] +* Support ES|QL requests through the `NodeClient::execute` {es-pull}106244[#106244] + +Indices APIs:: +* Add granular error list to alias action response {es-pull}106514[#106514] (issue: {es-issue}94478[#94478]) + +Machine Learning:: +* Add Cohere rerank to `_inference` service {es-pull}106378[#106378] +* Add support for Azure OpenAI embeddings to inference service {es-pull}107178[#107178] +* Create default word based chunker {es-pull}107303[#107303] +* Text structure endpoints to determine the structure of a list of messages and of an indexed field {es-pull}105660[#105660] + +Mapping:: +* Flatten object mappings when subobjects is false {es-pull}103542[#103542] (issues: {es-issue}99860[#99860], {es-issue}103497[#103497]) + +Security:: +* Get and Query API Key with profile uid {es-pull}106531[#106531] + +Vector Search:: +* Adding support for hex-encoded byte vectors on knn-search {es-pull}105393[#105393] + +[[upgrade-8.14.0]] +[float] +=== Upgrades + +Infra/Core:: +* Upgrade jna to 5.12.1 {es-pull}105717[#105717] + +Ingest Node:: +* Updating the tika version to 2.9.1 in the ingest attachment plugin {es-pull}106315[#106315] + +Network:: +* Upgrade to Netty 4.1.107 {es-pull}105517[#105517] + +Packaging:: +* Update bundled JDK to Java 22 (again) {es-pull}108654[#108654] + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index d39be07b0bf02..e6016fe438e24 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -44,38 +44,32 @@ faster indexing and similar retrieval latencies. {es-pull}103374[#103374] -[discrete] -[[query_phase_knn_supports_query_vector_builder]] -=== Query phase KNN now supports query_vector_builder -It is now possible to pass `model_text` and `model_id` within a `knn` query -in the [query DSL](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-knn-query.html) to convert a text query into a dense vector and run the -nearest neighbor query on it, instead of requiring the dense vector to be -directly passed (within the `query_vector` parameter). Similar to the -[top-level knn query](https://www.elastic.co/guide/en/elasticsearch/reference/current/knn-search.html) (executed in the DFS phase), it is possible to supply -a `query_vector_builder` object containing a `text_embedding` object with -`model_text` (the text query to be converted into a dense vector) and -`model_id` (the identifier of a deployed model responsible for transforming -the text query into a dense vector). Note that an embedding model with the -referenced `model_id` needs to be [deployed on a ML node](https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-deploy-models.html). -in the cluster. +// end::notable-highlights[] -{es-pull}106068[#106068] [discrete] -[[simd_neon_optimised_vector_distance_function_for_merging_int8_scalar_quantized_vectors_has_been_added]] -=== A SIMD (Neon) optimised vector distance function for merging int8 Scalar Quantized vectors has been added -An optimised int8 vector distance implementation for aarch64 has been added. -This implementation is currently only used during merging. -The vector distance implementation outperforms Lucene's Pamana Vector -implementation for binary comparisons by approx 5x (depending on the number -of dimensions). It does so by means of SIMD (Neon) intrinsics compiled into a -separate native library and link by Panama's FFI. 
Comparisons are performed on -off-heap mmap'ed vector data. -Macro benchmarks, SO_Dense_Vector with scalar quantization enabled, shows -significant improvements in merge times, approximately 3 times faster.
+[[new_custom_parser_for_iso_8601_datetimes]]
+=== New custom parser for ISO-8601 datetimes
+This introduces a new custom parser for ISO-8601 datetimes, for the `iso8601`, `strict_date_optional_time`, and
+`strict_date_optional_time_nanos` built-in date formats. This provides a performance improvement over the
+default Java date-time parsing. Whilst it maintains much of the same behaviour,
+the new parser does not accept nonsensical date-time strings that have multiple fractional seconds fields
+or multiple timezone specifiers. If the new parser fails to parse a string, it will then use the previous parser
+to parse it. If a large proportion of the input data consists of these invalid strings, this may cause
+a small performance degradation. If you wish to force the use of the old parsers regardless,
+set the JVM property `es.datetime.java_time_parsers=true` on all ES nodes.
-{es-pull}106133[#106133]
+{es-pull}106486[#106486]
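For example, a minimal sketch of forcing the old parsers on a self-managed node. The filename under `config/jvm.options.d/` is arbitrary; any `.options` file in that directory is picked up at startup:

[source,sh]
----
# config/jvm.options.d/datetime.options
# Revert to the pre-8.14 Java time parsers on this node
-Des.datetime.java_time_parsers=true
----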
-// end::notable-highlights[]
+[discrete]
+[[preview_support_for_connection_type_domain_isp_databases_in_geoip_processor]]
+=== Preview: Support for the 'Connection Type', 'Domain', and 'ISP' databases in the geoip processor
+As a Technical Preview, the {ref}/geoip-processor.html[`geoip`] processor can now use the commercial
+https://dev.maxmind.com/geoip/docs/databases/connection-type[GeoIP2 'Connection Type'],
+https://dev.maxmind.com/geoip/docs/databases/domain[GeoIP2 'Domain'],
+and
+https://dev.maxmind.com/geoip/docs/databases/isp[GeoIP2 'ISP']
+databases from MaxMind.
+{es-pull}108683[#108683]
diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index 04f698f06d879..ba0f6c018b0eb 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -115,7 +115,6 @@ The `rrf` retriever does not currently support: * <> * <> * <> -* <> * <> Using unsupported features as part of a search with an `rrf` retriever results @@ -519,6 +518,161 @@ We end with `_id: 3` as `_rank: 1`, `_id: 2` as `_rank: 2`, and `_id: 4` as `_rank: 3`. This ranking matches the result set from the original RRF search as expected.
+==== Explain in RRF
+
+In addition to individual query scoring details, we can make use of the `explain=true` parameter to get information on how
+the RRF scores for each document were computed. Working with the example above, and by adding
+`explain=true` to the search request, we'd now have a response that looks like the following:
+
+[source,js]
+----
+{
+  "hits":
+  [
+    {
+      "_index": "example-index",
+      "_id": "3",
+      "_score": 0.8333334,
+      "_rank": 1,
+      "_explanation":
+      {
+        "value": 0.8333334, <1>
+        "description": "rrf score: [0.8333334] computed for initial ranks [2, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query", <2>
+        "details": <3>
+        [
+          {
+            "value": 2, <4>
+            "description": "rrf score: [0.33333334], for rank [2] in query at index [0] computed as [1 / (2 + 1]), for matching query with score: ",
+            "details": <5>
+            [
+              {
+                "value": 0.15876243,
+                "description": "weight(text:rrf in 0) [PerFieldSimilarity], result of:",
+                "details":
+                [
+                  ...
+                ]
+              }
+            ]
+          },
+          {
+            "value": 1, <6>
+            "description": "rrf score: [0.5], for rank [1] in query at index [1] computed as [1 / (1 + 1]), for matching query with score: ",
+            "details":
+            [
+              {
+                "value": 1,
+                "description": "within top k documents",
+                "details":
+                []
+              }
+            ]
+          }
+        ]
+      }
+    }
+    ...
+  ]
+}
+----
+// NOTCONSOLE
+
+<1> the final RRF score for document with `_id=3`
+<2> a description on how this score was computed based on the ranks of this document in each individual query
+<3> details on how the RRF score was computed for each of the queries
+<4> the `value` here specifies the `rank` of this document in the specific query
+<5> standard `explain` output of the underlying query, describing matching terms and weights
+<6> the `value` here specifies the `rank` of this document for the second (`knn`) query
+
+In addition to the above, explain in RRF also supports <> using the `_name` parameter.
+Using named queries allows for easier and more intuitive understanding of the RRF score computation,
+especially when dealing with multiple queries. So, we would now have:
+
+[source,js]
+----
+GET example-index/_search
+{
+  "retriever": {
+    "rrf": {
+      "retrievers": [
+        {
+          "standard": {
+            "query": {
+              "term": {
+                "text": "rrf"
+              }
+            }
+          }
+        },
+        {
+          "knn": {
+            "field": "vector",
+            "query_vector": [3],
+            "k": 5,
+            "num_candidates": 5,
+            "_name": "my_knn_query" <1>
+          }
+        }
+      ],
+      "rank_window_size": 5,
+      "rank_constant": 1
+    }
+  },
+  "size": 3,
+  "aggs": {
+    "int_count": {
+      "terms": {
+        "field": "integer"
+      }
+    }
+  }
+}
+----
+// NOTCONSOLE
+<1> Here we specify a `_name` for the `knn` retriever
+
+The response would now include the named query in the explanation:
+[source,js]
+----
+{
+  "hits":
+  [
+    {
+      "_index": "example-index",
+      "_id": "3",
+      "_score": 0.8333334,
+      "_rank": 1,
+      "_explanation":
+      {
+        "value": 0.8333334,
+        "description": "rrf score: [0.8333334] computed for initial ranks [2, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query",
+        "details":
+        [
+          {
+            "value": 2,
+            "description": "rrf score: [0.33333334], for rank [2] in query at index [0] computed as [1 / (2 + 1]), for matching query with score: ",
+            "details":
+            [
+              ...
+            ]
+          },
+          {
+            "value": 1,
+            "description": "rrf score: [0.5], for rank [1] in query [my_knn_query] computed as [1 / (1 + 1]), for matching query with score: ", <1>
+            "details":
+            [
+              ...
+            ]
+          }
+        ]
+      }
+    }
+    ...
+  ]
+}
+----
+// NOTCONSOLE
+<1> Instead of the anonymous `at index n`, we now have a reference to the named query `my_knn_query`. ==== Pagination in RRF diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index c3eefec86e6f3..e7f503a4a6c70 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -13,6 +13,8 @@ query. The instructions in this tutorial shows you how to use ELSER to perform semantic search on your data. +IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer to the <> end-to-end tutorial. + NOTE: Only the first 512 extracted tokens per field are considered during semantic search with ELSER.
Refer to {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 89464d46744b2..6ecfea0a02dbc 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -1,20 +1,24 @@ [[semantic-search-inference]] === Tutorial: semantic search with the {infer} API + ++++ Semantic search with the {infer} API ++++ -The instructions in this tutorial shows you how to use the {infer} API with -various services to perform semantic search on your data. The following examples -use Cohere's `embed-english-v3.0` model, the `all-mpnet-base-v2` model from -HuggingFace, and OpenAI's `text-embedding-ada-002` second generation embedding -model. You can use any Cohere and OpenAI models, they are all supported by the -{infer} API. For a list of supported models available on HuggingFace, refer to
+The instructions in this tutorial show you how to use the {infer} API workflow with various services to perform semantic search on your data.
+
+IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer to the <> end-to-end tutorial.
+
+The following examples use Cohere's `embed-english-v3.0` model, the `all-mpnet-base-v2` model from HuggingFace, and OpenAI's `text-embedding-ada-002` second generation embedding model.
+You can use any Cohere and OpenAI models; they are all supported by the {infer} API.
+For a list of supported models available on HuggingFace, refer to
<>. -Click the name of the service you want to use on any of the widgets below to -review the corresponding instructions.
+Azure-based examples use models available through https://ai.azure.com/explore/models?selectedTask=embeddings[Azure AI Studio]
+or https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models[Azure OpenAI].
+Mistral examples use the `mistral-embed` model from https://docs.mistral.ai/getting-started/models/[the Mistral API].
+Click the name of the service you want to use on any of the widgets below to review the corresponding instructions.
[discrete] [[infer-service-requirements]] @@ -22,7 +26,6 @@ review the corresponding instructions. include::{es-ref-dir}/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc[] - [discrete] [[infer-text-embedding-task]] ==== Create an inference endpoint Create an {infer} endpoint by using the <>: include::{es-ref-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] - [discrete] [[infer-service-mappings]] ==== Create the index mapping -The mapping of the destination index - the index that contains the embeddings -that the model will create based on your input text - must be created. The -destination index must have a field with the <> +The mapping of the destination index - the index that contains the embeddings that the model will create based on your input text - must be created. +The destination index must have a field with the <> field type to index the output of the used model. include::{es-ref-dir}/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc[] - [discrete] [[infer-service-inference-ingest-pipeline]] ==== Create an ingest pipeline with an inference processor Create an <> with an -<> and use the model you created above to -infer against the data that is being ingested in the pipeline. +<> and use the model you created above to infer against the data that is being ingested in the pipeline.
include::{es-ref-dir}/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc[]
-
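Each service tab above follows the same overall pattern. As a generic sketch, with the hypothetical `my_embeddings_endpoint` standing in for the `inference_id` of the endpoint you created in the earlier step:

[source,console]
--------------------------------------------------
PUT _ingest/pipeline/my_embeddings_pipeline
{
  "processors": [
    {
      "inference": {
        "model_id": "my_embeddings_endpoint", <1>
        "input_output": { <2>
          "input_field": "content",
          "output_field": "content_embedding"
        }
      }
    }
  ]
}
--------------------------------------------------
// TEST[skip:TBD]
<1> A placeholder {infer} endpoint ID; substitute the `inference_id` of the endpoint you created.
<2> Configuration object that defines the field the input text is read from and the field that will contain the {infer} results.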
[discrete]
[[infer-load-data]]
==== Load data
-In this step, you load the data that you later use in the {infer} ingest -pipeline to create embeddings from it.
+In this step, you load the data that you later use in the {infer} ingest pipeline to create embeddings from it.
-Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS -MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by -a list of relevant text passages. All unique passages, along with their IDs, -have been extracted from that data set and compiled into a
+Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS MARCO Passage Ranking data set.
+It consists of 200 queries, each accompanied by a list of relevant text passages.
+All unique passages, along with their IDs, have been extracted from that data set and compiled into a
https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file].
Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer]
-in the {ml-app} UI. Assign the name `id` to the first column and `content` to -the second column. The index name is `test-data`. Once the upload is complete, -you can see an index named `test-data` with 182469 documents.
-
+in the {ml-app} UI.
+Assign the name `id` to the first column and `content` to the second column.
+The index name is `test-data`.
+Once the upload is complete, you can see an index named `test-data` with 182469 documents.
[discrete] [[reindexing-data-infer]] @@ -92,8 +88,7 @@ GET _tasks/ ---- // TEST[skip:TBD]
-You can also cancel the reindexing process if you don't want to wait until the -reindexing process is fully complete which might take hours for large data sets:
+You can also cancel the reindexing process if you don't want to wait until it is fully complete, which might take hours for large data sets:
[source,console] ---- @@ -106,17 +101,14 @@ POST _tasks//_cancel [[infer-semantic-search]] ==== Semantic search
-After the data set has been enriched with the embeddings, you can query the data -using {ref}/knn-search.html#knn-semantic-search[semantic search]. Pass a -`query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and -provide the query text and the model you have used to create the embeddings.
+After the data set has been enriched with the embeddings, you can query the data using {ref}/knn-search.html#knn-semantic-search[semantic search].
+Pass a
+`query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and provide the query text and the model you have used to create the embeddings.
-NOTE: If you cancelled the reindexing process, you run the query only a part of -the data which affects the quality of your results.
+NOTE: If you cancelled the reindexing process, you run the query on only a part of the data, which affects the quality of your results.
include::{es-ref-dir}/tab-widgets/inference-api/infer-api-search-widget.asciidoc[]
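Whichever service you chose, the search request has the same overall shape. A minimal sketch, assuming the hypothetical destination index `my-embeddings` with a `content_embedding` field and the hypothetical endpoint ID `my_embeddings_endpoint` from the earlier steps:

[source,console]
--------------------------------------------------
GET my-embeddings/_search
{
  "knn": {
    "field": "content_embedding",
    "query_vector_builder": {
      "text_embedding": {
        "model_id": "my_embeddings_endpoint",
        "model_text": "Calculate fuel cost"
      }
    },
    "k": 10,
    "num_candidates": 100
  },
  "_source": [
    "id",
    "content"
  ]
}
--------------------------------------------------
// TEST[skip:TBD]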
-
[discrete] [[infer-interactive-tutorials]] ==== Interactive tutorials @@ -124,4 +116,4 @@ include::{es-ref-dir}/tab-widgets/inference-api/infer-api-search-widget.asciidoc
You can also find tutorials in an interactive Colab notebook format using the {es} Python client:
* https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/integrations/cohere/inference-cohere.ipynb[Cohere {infer} tutorial notebook]
-* https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/search/07-inference.ipynb[OpenAI {infer} tutorial notebook] \ No newline at end of file
+* https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/search/07-inference.ipynb[OpenAI {infer} tutorial notebook]
diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 7dd7fbaf9823c..2d776077e13c5 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -1,25 +1,29 @@ [[semantic-search]] == Semantic search
-Semantic search is a search method that helps you find data based on the intent -and contextual meaning of a search query, instead of a match on query terms
+Semantic search is a search method that helps you find data based on the intent and contextual meaning of a search query, instead of a match on query terms
(lexical search).
-{es} provides semantic search capabilities using {ml-docs}/ml-nlp.html[natural -language processing (NLP)] and vector search. Deploying an NLP model to {es} -enables it to extract text embeddings out of text. Embeddings are vectors that -provide a numeric representation of a text. Pieces of content with similar -meaning have similar representations.
+{es} provides various semantic search capabilities using {ml-docs}/ml-nlp.html[natural language processing (NLP)] and vector search.
+Using an NLP model enables you to extract text embeddings out of text.
+Embeddings are vectors that provide a numeric representation of a text.
+Pieces of content with similar meaning have similar representations.
+NLP models can be used in the {stack} in various ways; you can:
+
+* deploy models in {es}
+* use the <> (recommended)
+* use the <>
+
[[semantic-search-diagram]] .A simplified representation of encoding textual concepts as vectors image::images/search/vector-search-oversimplification.png[A simplified representation of encoding textual concepts as vectors,align="center"]
-At query time, {es} can use the same NLP model to convert a query into -embeddings, enabling you to find documents with similar text embeddings.
+At query time, {es} can use the same NLP model to convert a query into embeddings, enabling you to find documents with similar text embeddings.
+
+This guide shows you how to implement semantic search with {es}, from selecting an NLP model to writing queries.
-This guide shows you how to implement semantic search with {es}, from selecting -an NLP model, to writing queries.
+IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer to the <> end-to-end tutorial.
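As a quick taste of that recommended path, here is a minimal sketch combining the `semantic_text` field type with the `semantic` query. The index and field names are illustrative, and the mapping relies on the field type's default {infer} endpoint; refer to the linked tutorial for the full walkthrough:

[source,console]
------------------------------------------------------------
PUT my-semantic-index
{
  "mappings": {
    "properties": {
      "semantic_field": { "type": "semantic_text" }
    }
  }
}

PUT my-semantic-index/_doc/1
{
  "semantic_field": "A pair of comfortable running shoes"
}

GET my-semantic-index/_search
{
  "query": {
    "semantic": {
      "field": "semantic_field",
      "query": "shoes"
    }
  }
}
------------------------------------------------------------
// TEST[skip:TBD]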
[discrete] [[semantic-search-select-nlp-model]] diff --git a/docs/reference/setup/install/package-security.asciidoc b/docs/reference/setup/install/package-security.asciidoc index 40bd49d064b43..3b9f4fc1740ed 100644 --- a/docs/reference/setup/install/package-security.asciidoc +++ b/docs/reference/setup/install/package-security.asciidoc @@ -11,8 +11,9 @@ the `elastic` built-in superuser. and TLS is enabled and configured with these keys and certificates. The password and certificate and keys are output to your terminal. +You can reset the password for the `elastic` user with the <> command. -We recommend storing the `elastic` password as an environment variable in your shell. Example: +We recommend storing the `elastic` password as an environment variable in your shell. For example: [source,sh] ---- diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc index 80f6da2cf602a..c8a42c4d0585a 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc @@ -31,6 +31,12 @@ id="infer-api-ingest-azure-ai-studio"> Azure AI Studio +

+
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc index 4f85c69c7605e..a239c79e5a6d1 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc @@ -138,3 +138,29 @@ PUT _ingest/pipeline/azure_ai_studio_embeddings and the `output_field` that will contain the {infer} results. // end::azure-ai-studio[] + +// tag::mistral[] + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/mistral_embeddings +{ + "processors": [ + { + "inference": { + "model_id": "mistral_embeddings", <1> + "input_output": { <2> + "input_field": "content", + "output_field": "content_embedding" + } + } + } + ] +} +-------------------------------------------------- +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. +<2> Configuration object that defines the `input_field` for the {infer} process +and the `output_field` that will contain the {infer} results. + +// end::mistral[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc index f6aa44a2b60a7..80c7c7ef23ee3 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc @@ -31,6 +31,12 @@ id="infer-api-mapping-azure-ai-studio"> Azure AI Studio +
+ aria-labelledby="infer-api-mapping-hf" + hidden=""> ++++ include::infer-api-mapping.asciidoc[tag=hugging-face] @@ -83,6 +90,17 @@ include::infer-api-mapping.asciidoc[tag=azure-openai] include::infer-api-mapping.asciidoc[tag=azure-ai-studio] +++++ +
+ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc index 8038dea713117..a1bce38a02ad2 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc @@ -173,3 +173,37 @@ the {infer} pipeline configuration in the next step. <6> The field type which is text in this example. // end::azure-ai-studio[] + +// tag::mistral[] + +[source,console] +-------------------------------------------------- +PUT mistral-embeddings +{ + "mappings": { + "properties": { + "content_embedding": { <1> + "type": "dense_vector", <2> + "dims": 1024, <3> + "element_type": "float", + "similarity": "dot_product" <4> + }, + "content": { <5> + "type": "text" <6> + } + } + } +} +-------------------------------------------------- +<1> The name of the field to contain the generated tokens. It must be referenced +in the {infer} pipeline configuration in the next step. +<2> The field to contain the tokens is a `dense_vector` field. +<3> The output dimensions of the model. This value may be found on the https://docs.mistral.ai/getting-started/models/[Mistral model reference]. +<4> For Mistral embeddings, the `dot_product` function should be used to +calculate similarity. +<5> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `content`. It must be referenced in +the {infer} pipeline configuration in the next step. +<6> The field type which is text in this example. + +// end::mistral[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc index a35ea4e3b0207..4face6a105819 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc @@ -27,10 +27,16 @@ +
+
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc index a862e864fb068..927e47ea4d67c 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc @@ -131,3 +131,26 @@ might affect the throughput of the reindexing process. If this happens, change `size` to `3` or a similar value in magnitude. // end::azure-ai-studio[] + +// tag::mistral[] + +[source,console] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 50 <1> + }, + "dest": { + "index": "mistral-embeddings", + "pipeline": "mistral_embeddings" + } +} +---- +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +// end::mistral[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc index 85b15678d1681..9981eb90d4929 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc @@ -31,6 +31,12 @@ id="infer-api-requirements-azure-ai-studio"> Azure AI Studio +
+
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc index 3ffcc6e4dd2b1..435e53bbc0bc0 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc @@ -33,3 +33,9 @@ You can apply for access to Azure OpenAI by completing the form at https://aka.m * A deployed https://ai.azure.com/explore/models?selectedTask=embeddings[embeddings] or https://ai.azure.com/explore/models?selectedTask=chat-completion[chat completion] model. // end::azure-ai-studio[] + +// tag::mistral[] +* A Mistral Account on https://console.mistral.ai/[La Plateforme] +* An API key generated for your account + +// end::mistral[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc index 17b747e86be4a..6a67b28f91601 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc @@ -31,6 +31,12 @@ id="infer-api-search-azure-ai-studio"> Azure AI Studio +
+
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc index 4f1a24959de9f..523c2301e75ff 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc @@ -340,3 +340,68 @@ query from the `azure-ai-studio-embeddings` index sorted by their proximity to t // NOTCONSOLE // end::azure-ai-studio[] + +// tag::mistral[] + +[source,console] +-------------------------------------------------- +GET mistral-embeddings/_search +{ + "knn": { + "field": "content_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "mistral_embeddings", + "model_text": "Calculate fuel cost" + } + }, + "k": 10, + "num_candidates": 100 + }, + "_source": [ + "id", + "content" + ] +} +-------------------------------------------------- +// TEST[skip:TBD] + +As a result, you receive the top 10 documents that are closest in meaning to the +query from the `mistral-embeddings` index sorted by their proximity to the query: + +[source,consol-result] +-------------------------------------------------- +"hits": [ + { + "_index": "mistral-embeddings", + "_id": "DDd5OowBHxQKHyc3TDSC", + "_score": 0.83704096, + "_source": { + "id": 862114, + "body": "How to calculate fuel cost for a road trip. By Tara Baukus Mello • Bankrate.com. Dear Driving for Dollars, My family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost.It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes.y family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost. It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes." + } + }, + { + "_index": "mistral-embeddings", + "_id": "ajd5OowBHxQKHyc3TDSC", + "_score": 0.8345704, + "_source": { + "id": 820622, + "body": "Home Heating Calculator. Typically, approximately 50% of the energy consumed in a home annually is for space heating. When deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important.This calculator can help you estimate the cost of fuel for different heating appliances.hen deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important. This calculator can help you estimate the cost of fuel for different heating appliances." + } + }, + { + "_index": "mistral-embeddings", + "_id": "Djd5OowBHxQKHyc3TDSC", + "_score": 0.8327426, + "_source": { + "id": 8202683, + "body": "Fuel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel.If you are paying $4 per gallon, the trip would cost you $200.Most boats have much larger gas tanks than cars.uel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel." + } + }, + (...) 
+ ] +-------------------------------------------------- +// NOTCONSOLE + +// end::mistral[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc index 3bccb140d44f6..1f3ad645d7c29 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc @@ -31,6 +31,12 @@ id="infer-api-task-azure-ai-studio"> Azure AI Studio +
+
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc index 5692388a18531..18fa3ba541bff 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -157,3 +157,23 @@ Also, when using this model the recommended similarity measure to use in the `dense_vector` field mapping is `dot_product`. // end::azure-ai-studio[] + +// tag::mistral[] + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/mistral_embeddings <1> +{ + "service": "mistral", + "service_settings": { + "api_key": "", <2> + "model": "" <3> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `text_embedding` in the path and the `inference_id` which is the unique identifier of the {infer} endpoint is `mistral_embeddings`. +<2> The API key for accessing the Mistral API. You can find this in your Mistral account's API Keys page. +<3> The Mistral embeddings model name, for example `mistral-embed`. + +// end::mistral[] diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 6b5a541d15661..5a32d2e0a58cd 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -17,7 +17,7 @@ commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" forbiddenApis = "de.thetaphi:forbiddenapis:3.6" -gradle-enterprise = "com.gradle:gradle-enterprise-gradle-plugin:3.16.2" +gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.17.4" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" httpclient = "org.apache.httpcomponents:httpclient:4.5.14" @@ -38,7 +38,7 @@ maven-model = "org.apache.maven:maven-model:3.6.2" mockito-core = "org.mockito:mockito-core:1.9.5" nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" reflections = "org.reflections:reflections:0.9.12" -shadow-plugin = "com.github.johnrengelman:shadow:8.1.1" +shadow-plugin = "com.github.breskeby:shadow:3b035f2" spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } spock-platform = { group = "org.spockframework", name="spock-bom", version.ref="spock" } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 41c3bafde5e33..29e0afe51364f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -376,9 +376,14 @@ - - - + + + + + + + + @@ -736,9 +741,9 @@ - - - + + + @@ -851,14 +856,9 @@ - - - - - - - - + + + @@ -4077,6 +4077,11 @@ + + + + + @@ -4182,6 +4187,11 @@ + + + + + diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index fcbbad6dd644c..515ab9d5f1822 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip +distributionSha256Sum=f8b4f4772d302c8ff580bc40d0f56e715de69b163546944f787c87abf209c961 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-all.zip networkTimeout=10000 
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
diff --git a/gradlew b/gradlew
index 1aa94a4269074..b740cf13397ab 100755
--- a/gradlew
+++ b/gradlew
@@ -55,7 +55,7 @@
 #       Darwin, MinGW, and NonStop.
 #
 #   (3) This script is generated from the Groovy template
-#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
 #       within the Gradle project.
 #
 #       You can find Gradle at https://github.com/gradle/gradle/.
diff --git a/libs/core/src/main/java/org/elasticsearch/core/TimeValue.java b/libs/core/src/main/java/org/elasticsearch/core/TimeValue.java
index df7c47943289d..26d93bca6b09a 100644
--- a/libs/core/src/main/java/org/elasticsearch/core/TimeValue.java
+++ b/libs/core/src/main/java/org/elasticsearch/core/TimeValue.java
@@ -88,6 +88,13 @@ public static TimeValue timeValueDays(long days) {
         return new TimeValue(days, TimeUnit.DAYS);
     }
 
+    /**
+     * @return the {@link TimeValue} object that has the least duration.
+     */
+    public static TimeValue min(TimeValue time1, TimeValue time2) {
+        return time1.compareTo(time2) < 0 ? time1 : time2;
+    }
+
     /**
      * @return the unit used for this time value, see {@link #duration()}
      */
diff --git a/libs/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/libs/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
index b6481db9b9951..dd2755ac1f9f7 100644
--- a/libs/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
+++ b/libs/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
@@ -17,6 +17,7 @@
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.object.HasToString.hasToString;
 
@@ -231,6 +232,12 @@ public void testRejectsNegativeValuesAtCreation() {
         assertThat(ex.getMessage(), containsString("duration cannot be negative"));
     }
 
+    public void testMin() {
+        assertThat(TimeValue.min(TimeValue.ZERO, TimeValue.timeValueNanos(1)), is(TimeValue.timeValueNanos(0)));
+        assertThat(TimeValue.min(TimeValue.MAX_VALUE, TimeValue.timeValueNanos(1)), is(TimeValue.timeValueNanos(1)));
+        assertThat(TimeValue.min(TimeValue.MINUS_ONE, TimeValue.timeValueHours(1)), is(TimeValue.MINUS_ONE));
+    }
+
     private TimeUnit randomTimeUnitObject() {
         return randomFrom(
             TimeUnit.NANOSECONDS,
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java
index 242da6fd705dd..71392aeff542b 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentType.java
@@ -24,7 +24,7 @@ public enum XContentType implements MediaType {
 
     /**
      * A JSON based content type.
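     * Backed by Jackson's JSON implementation, exposed through {@link JsonXContent}.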
     */
-    JSON(0) {
+    JSON(0, JsonXContent.jsonXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return "application/json";
@@ -40,11 +40,6 @@ public String queryParameter() {
             return "json";
         }
 
-        @Override
-        public XContent xContent() {
-            return JsonXContent.jsonXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue("application/json"), new HeaderValue("application/x-ndjson"), new HeaderValue("application/*"));
@@ -53,7 +48,7 @@ public Set<HeaderValue> headerValues() {
     /**
     * The jackson based smile binary format. Fast and compact binary format.
     */
-    SMILE(1) {
+    SMILE(1, SmileXContent.smileXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return "application/smile";
@@ -64,11 +59,6 @@ public String queryParameter() {
             return "smile";
         }
 
-        @Override
-        public XContent xContent() {
-            return SmileXContent.smileXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue("application/smile"));
@@ -77,7 +67,7 @@ public Set<HeaderValue> headerValues() {
     /**
     * A YAML based content type.
     */
-    YAML(2) {
+    YAML(2, YamlXContent.yamlXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return "application/yaml";
@@ -88,11 +78,6 @@ public String queryParameter() {
             return "yaml";
         }
 
-        @Override
-        public XContent xContent() {
-            return YamlXContent.yamlXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue("application/yaml"));
@@ -101,7 +86,7 @@ public Set<HeaderValue> headerValues() {
     /**
     * A CBOR based content type.
     */
-    CBOR(3) {
+    CBOR(3, CborXContent.cborXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return "application/cbor";
@@ -112,11 +97,6 @@ public String queryParameter() {
             return "cbor";
         }
 
-        @Override
-        public XContent xContent() {
-            return CborXContent.cborXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue("application/cbor"));
@@ -125,7 +105,7 @@ public Set<HeaderValue> headerValues() {
     /**
     * A versioned JSON based content type.
     */
-    VND_JSON(4) {
+    VND_JSON(4, JsonXContent.jsonXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return VENDOR_APPLICATION_PREFIX + "json";
@@ -136,11 +116,6 @@ public String queryParameter() {
             return "vnd_json";
         }
 
-        @Override
-        public XContent xContent() {
-            return JsonXContent.jsonXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(
@@ -157,7 +132,7 @@ public XContentType canonical() {
     /**
     * Versioned jackson based smile binary format. Fast and compact binary format.
     */
-    VND_SMILE(5) {
+    VND_SMILE(5, SmileXContent.smileXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return VENDOR_APPLICATION_PREFIX + "smile";
@@ -168,11 +143,6 @@ public String queryParameter() {
             return "vnd_smile";
         }
 
-        @Override
-        public XContent xContent() {
-            return SmileXContent.smileXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue(VENDOR_APPLICATION_PREFIX + "smile", Map.of(COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
@@ -186,7 +156,7 @@ public XContentType canonical() {
     /**
     * A Versioned YAML based content type.
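    * Served as the {@code application/vnd.elasticsearch+yaml} vendor media type.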
    */
-    VND_YAML(6) {
+    VND_YAML(6, YamlXContent.yamlXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return VENDOR_APPLICATION_PREFIX + "yaml";
@@ -197,11 +167,6 @@ public String queryParameter() {
             return "vnd_yaml";
         }
 
-        @Override
-        public XContent xContent() {
-            return YamlXContent.yamlXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue(VENDOR_APPLICATION_PREFIX + "yaml", Map.of(COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
@@ -215,7 +180,7 @@ public XContentType canonical() {
     /**
     * A Versioned CBOR based content type.
    */
-    VND_CBOR(7) {
+    VND_CBOR(7, CborXContent.cborXContent) {
         @Override
         public String mediaTypeWithoutParameters() {
             return VENDOR_APPLICATION_PREFIX + "cbor";
@@ -226,11 +191,6 @@ public String queryParameter() {
             return "vnd_cbor";
         }
 
-        @Override
-        public XContent xContent() {
-            return CborXContent.cborXContent;
-        }
-
         @Override
         public Set<HeaderValue> headerValues() {
             return Set.of(new HeaderValue(VENDOR_APPLICATION_PREFIX + "cbor", Map.of(COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
@@ -275,8 +235,11 @@ public static XContentType fromMediaType(String mediaTypeHeaderValue) throws IllegalArgumentException
 
     private final int index;
 
-    XContentType(int index) {
+    private final XContent xContent;
+
+    XContentType(int index, XContent xContent) {
         this.index = index;
+        this.xContent = xContent;
     }
 
     public static Byte parseVersion(String mediaType) {
@@ -296,7 +259,9 @@ public String mediaType() {
         return mediaTypeWithoutParameters();
     }
 
-    public abstract XContent xContent();
+    public final XContent xContent() {
+        return xContent;
+    }
 
     public abstract String mediaTypeWithoutParameters();
 
diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml
index 7800923ab1580..1703d4908a753 100644
--- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml
+++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml
@@ -294,11 +294,11 @@ setup:
 ---
 "Configure with no synthetic source":
   - requires:
-      cluster_features: ["gte_v8.9.0"]
-      reason: "Error message fix in 8.9"
+      cluster_features: ["gte_v8.15.0"]
+      reason: "Error message changed in 8.15.0"
 
   - do:
-      catch: '/Time series indices only support synthetic source./'
+      catch: '/Indices with with index mode \[time_series\] only support synthetic source/'
       indices.create:
         index: tsdb_error
         body:
diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java
new file mode 100644
index 0000000000000..8a343ff9cf853
--- /dev/null
+++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java
@@ -0,0 +1,406 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.core.Strings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.indices.InvalidIndexTemplateException; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.is; + +public class LogsDataStreamIT extends ESSingleNodeTestCase { + + private static final String LOGS_OR_STANDARD_MAPPING = """ + { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + } + } + }"""; + + private static final String TIME_SERIES_MAPPING = """ + { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword", + "time_series_dimension": "true" + }, + "pid": { + "type": "long", + "time_series_dimension": "true" + }, + "method": { + "type": "keyword" + }, + "ip_address": { + "type": "ip" + }, + "cpu_usage": { + "type": "float", + "time_series_metric": "gauge" + } + } + }"""; + + private static final String LOG_DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "message": "%s", + "ip_address": "%s" + } + """; + + private static final String TIME_SERIES_DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "ip_address": "%s", + "cpu_usage": "%f" + } + """; + + private static String toIsoTimestamp(final Instant instant) { + 
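+        // Format as strict_date_optional_time (ISO-8601) so the value parses against the "date" fields mapped above.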
return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); + } + + private static String createLogDocument( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String message, + final String ipAddress + ) { + return Strings.format(LOG_DOC_TEMPLATE, toIsoTimestamp(timestamp), hostname, pid, method, message, ipAddress); + } + + private static String createTimeSeriesDocument( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String ipAddress, + double cpuUsage + ) { + return Strings.format(TIME_SERIES_DOC_TEMPLATE, toIsoTimestamp(timestamp), hostname, pid, method, ipAddress, cpuUsage); + } + + @Override + protected Collection> getPlugins() { + return List.of(DataStreamsPlugin.class, InternalSettingsPlugin.class); + } + + public void testLogsIndexModeDataStreamIndexing() throws IOException, ExecutionException, InterruptedException { + putComposableIndexTemplate( + client(), + "logs-composable-template", + LOGS_OR_STANDARD_MAPPING, + Map.of("index.mode", "logs"), + List.of("logs-*-*") + ); + final String dataStreamName = generateDataStreamName("logs"); + createDataStream(client(), dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + rolloverDataStream(dataStreamName); + indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName); + } + + public void testIndexModeLogsAndStandardSwitching() throws IOException, ExecutionException, InterruptedException { + final List indexModes = new ArrayList<>(); + final String dataStreamName = generateDataStreamName("logs"); + indexModes.add(IndexMode.STANDARD); + putComposableIndexTemplate( + client(), + "logs-composable-template", + LOGS_OR_STANDARD_MAPPING, + Map.of("index.mode", "standard"), + List.of("logs-*-*") + ); + createDataStream(client(), dataStreamName); + for (int i = 0; i < randomIntBetween(5, 10); i++) { + final IndexMode indexMode = i % 2 == 0 ? 
IndexMode.LOGS : IndexMode.STANDARD;
+            indexModes.add(indexMode);
+            updateComposableIndexTemplate(
+                client(),
+                "logs-composable-template",
+                LOGS_OR_STANDARD_MAPPING,
+                Map.of("index.mode", indexMode.getName()),
+                List.of("logs-*-*")
+            );
+            indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+            rolloverDataStream(dataStreamName);
+        }
+        assertDataStreamBackingIndicesModes(dataStreamName, indexModes);
+    }
+
+    public void testIndexModeLogsAndTimeSeriesSwitching() throws IOException, ExecutionException, InterruptedException {
+        final String dataStreamName = generateDataStreamName("custom");
+        final List<String> indexPatterns = List.of("custom-*-*");
+        final Map<String, String> logsSettings = Map.of("index.mode", "logs");
+        final Map<String, String> timeSeriesSettings = Map.of("index.mode", "time_series", "index.routing_path", "hostname");
+
+        putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns);
+        createDataStream(client(), dataStreamName);
+        indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+
+        updateComposableIndexTemplate(client(), "custom-composable-template", TIME_SERIES_MAPPING, timeSeriesSettings, indexPatterns);
+        rolloverDataStream(dataStreamName);
+        indexTimeSeriesDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+
+        updateComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns);
+        rolloverDataStream(dataStreamName);
+        indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+
+        assertDataStreamBackingIndicesModes(dataStreamName, List.of(IndexMode.LOGS, IndexMode.TIME_SERIES, IndexMode.LOGS));
+    }
+
+    public void testInvalidIndexModeTimeSeriesSwitchWithoutRoutingPath() throws IOException, ExecutionException, InterruptedException {
+        final String dataStreamName = generateDataStreamName("custom");
+        final List<String> indexPatterns = List.of("custom-*-*");
+        final Map<String, String> logsSettings = Map.of("index.mode", "logs");
+        final Map<String, String> timeSeriesSettings = Map.of("index.mode", "time_series");
+
+        putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns);
+        createDataStream(client(), dataStreamName);
+        indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+
+        expectThrows(
+            InvalidIndexTemplateException.class,
+            () -> updateComposableIndexTemplate(
+                client(),
+                "custom-composable-template",
+                LOGS_OR_STANDARD_MAPPING,
+                timeSeriesSettings,
+                indexPatterns
+            )
+        );
+    }
+
+    public void testInvalidIndexModeTimeSeriesSwitchWithoutDimensions() throws IOException, ExecutionException, InterruptedException {
+        final String dataStreamName = generateDataStreamName("custom");
+        final List<String> indexPatterns = List.of("custom-*-*");
+        final Map<String, String> logsSettings = Map.of("index.mode", "logs");
+        final Map<String, String> timeSeriesSettings = Map.of("index.mode", "time_series", "index.routing_path", "hostname");
+
+        putComposableIndexTemplate(client(), "custom-composable-template", LOGS_OR_STANDARD_MAPPING, logsSettings, indexPatterns);
+        createDataStream(client(), dataStreamName);
+        indexLogOrStandardDocuments(client(), randomIntBetween(10, 20), randomIntBetween(32, 64), dataStreamName);
+
+        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> {
+            updateComposableIndexTemplate(
+                client(),
"custom-composable-template", + LOGS_OR_STANDARD_MAPPING, + timeSeriesSettings, + indexPatterns + ); + + }); + assertThat( + exception.getCause().getCause().getMessage(), + Matchers.equalTo( + "All fields that match routing_path must be configured with [time_series_dimension: true] or flattened fields with " + + "a list of dimensions in [time_series_dimensions] and without the [script] parameter. [hostname] was not a dimension." + ) + ); + } + + private void assertDataStreamBackingIndicesModes(final String dataStreamName, final List modes) { + final GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); + final GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) + .actionGet(); + final DataStream dataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream(); + final DataStream.DataStreamIndices backingIndices = dataStream.getBackingIndices(); + final Iterator indexModesIterator = modes.iterator(); + assertThat(backingIndices.getIndices().size(), Matchers.equalTo(modes.size())); + for (final Index index : backingIndices.getIndices()) { + final GetSettingsResponse getSettingsResponse = indicesAdmin().getSettings( + new GetSettingsRequest().indices(index.getName()).includeDefaults(true) + ).actionGet(); + final Settings settings = getSettingsResponse.getIndexToSettings().get(index.getName()); + assertThat(settings.get("index.mode"), Matchers.equalTo(indexModesIterator.next().getName())); + } + } + + final String generateDataStreamName(final String prefix) { + return String.format(Locale.ROOT, "%s-%s-%s", prefix, randomFrom("apache", "nginx", "system"), randomFrom("dev", "qa", "prod")); + } + + private void rolloverDataStream(final String dataStreamName) { + assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); + } + + private void indexLogOrStandardDocuments( + final Client client, + int numBulkRequests, + int numDocsPerBulkRequest, + final String dataStreamName + ) { + { + for (int i = 0; i < numBulkRequests; i++) { + BulkRequest bulkRequest = new BulkRequest(dataStreamName); + for (int j = 0; j < numDocsPerBulkRequest; j++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = createLogDocument( + Instant.now(), + randomAlphaOfLength(7), + randomIntBetween(100, 200), + randomFrom("POST", "PUT", "GET"), + randomAlphaOfLengthBetween(256, 512), + InetAddresses.toAddrString(randomIp(randomBoolean())) + ); + indexRequest.source(doc, XContentType.JSON); + bulkRequest.add(indexRequest); + } + final BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.hasFailures(), is(false)); + } + final BroadcastResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(dataStreamName)).actionGet(); + assertThat(refreshResponse.getStatus(), is(RestStatus.OK)); + } + } + + private void indexTimeSeriesDocuments( + final Client client, + int numBulkRequests, + int numDocsPerBulkRequest, + final String dataStreamName + ) { + { + for (int i = 0; i < numBulkRequests; i++) { + BulkRequest bulkRequest = new BulkRequest(dataStreamName); + for (int j = 0; j < numDocsPerBulkRequest; j++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = createTimeSeriesDocument( + Instant.now(), + randomAlphaOfLength(12), + randomIntBetween(100, 200), + 
randomFrom("POST", "PUT", "GET"), + InetAddresses.toAddrString(randomIp(randomBoolean())), + randomDoubleBetween(0.0D, 1.0D, false) + ); + indexRequest.source(doc, XContentType.JSON); + bulkRequest.add(indexRequest); + } + final BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.hasFailures(), is(false)); + } + final BroadcastResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(dataStreamName)).actionGet(); + assertThat(refreshResponse.getStatus(), is(RestStatus.OK)); + } + } + + private void createDataStream(final Client client, final String dataStreamName) throws InterruptedException, ExecutionException { + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); + final AcknowledgedResponse createDataStreamResponse = client.execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + .get(); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + } + + private static void updateComposableIndexTemplate( + final Client client, + final String templateName, + final String mapping, + final Map settings, + final List indexPatterns + ) throws IOException { + putComposableIndexTemplate(client, templateName, mapping, settings, indexPatterns); + } + + private static void putComposableIndexTemplate( + final Client client, + final String templateName, + final String mapping, + final Map settings, + final List indexPatterns + ) throws IOException { + final Settings.Builder templateSettings = Settings.builder(); + for (Map.Entry setting : settings.entrySet()) { + templateSettings.put(setting.getKey(), setting.getValue()); + } + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request(templateName); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(indexPatterns) + .template(new Template(templateSettings.build(), new CompressedXContent(mapping), null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = client.execute( + TransportPutComposableIndexTemplateAction.TYPE, + putComposableTemplateRequest + ).actionGet(); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + } +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java new file mode 100644 index 0000000000000..c18bcf750242f --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamRestIT.java @@ -0,0 +1,293 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.net.InetAddress; +import java.time.Instant; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class LogsDataStreamRestIT extends ESRestTestCase { + + private static final String DATA_STREAM_NAME = "logs-apache-dev"; + private RestClient client; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void setup() throws Exception { + client = client(); + waitForLogs(client); + } + + private static void waitForLogs(RestClient client) throws Exception { + assertBusy(() -> { + try { + Request request = new Request("GET", "_index_template/logs"); + assertOK(client.performRequest(request)); + } catch (ResponseException e) { + fail(e.getMessage()); + } + }); + } + + private static final String LOGS_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "composed_of": [ "logs@mappings", "logs@settings" ], + "template": { + "settings": { + "index": { + "mode": "logs" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword" + }, + "pid": { + "type": "long" + }, + "method": { + "type": "keyword" + }, + "message": { + "type": "text" + }, + "ip_address": { + "type": "ip" + } + } + } + } + }"""; + + private static final String STANDARD_TEMPLATE = """ + { + "index_patterns": [ "logs-*-*" ], + "data_stream": {}, + "priority": 201, + "template": { + "settings": { + "index": { + "mode": "standard" + } + }, + "mappings": { + "properties": { + "@timestamp" : { + "type": "date" + }, + "hostname": { + "type": "keyword", + "time_series_dimension": "true" + }, + "pid": { + "type": "long", + "time_series_dimension": "true" + }, + "method": { + "type": "keyword" + }, + "ip_address": { + "type": "ip" + } + } + } + } + }"""; + + private static final String DOC_TEMPLATE = """ + { + "@timestamp": "%s", + "hostname": "%s", + "pid": "%d", + "method": "%s", + "message": "%s", + "ip_address": "%s" + } + """; + + public void testLogsIndexing() throws IOException { + putTemplate(client, "custom-template", LOGS_TEMPLATE); + createDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 0); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", 
"POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 1); + } + + public void testLogsStandardIndexModeSwitch() throws IOException { + putTemplate(client, "custom-template", LOGS_TEMPLATE); + createDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 0); + + putTemplate(client, "custom-template", STANDARD_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(64), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("standard", 1); + + putTemplate(client, "custom-template", LOGS_TEMPLATE); + rolloverDataStream(client, DATA_STREAM_NAME); + indexDocument( + client, + DATA_STREAM_NAME, + document( + Instant.now(), + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomFrom("PUT", "POST", "GET"), + randomAlphaOfLength(32), + randomIp(randomBoolean()) + ) + ); + assertDataStreamBackingIndexMode("logs", 2); + } + + private void assertDataStreamBackingIndexMode(final String indexMode, int backingIndex) throws IOException { + assertThat(getSettings(client, getWriteBackingIndex(client, DATA_STREAM_NAME, backingIndex)).get("index.mode"), is(indexMode)); + } + + private String document( + final Instant timestamp, + final String hostname, + long pid, + final String method, + final String message, + final InetAddress ipAddress + ) { + return String.format( + Locale.ROOT, + DOC_TEMPLATE, + DateFormatter.forPattern(FormatNames.DATE.getName()).format(timestamp), + hostname, + pid, + method, + message, + InetAddresses.toAddrString(ipAddress) + ); + } + + private static void createDataStream(final RestClient client, final String dataStreamName) throws IOException { + Request request = new Request("PUT", "_data_stream/" + dataStreamName); + assertOK(client.performRequest(request)); + } + + private static void putTemplate(final RestClient client, final String templateName, final String mappings) throws IOException { + final Request request = new Request("PUT", "/_index_template/" + templateName); + request.setJsonEntity(mappings); + assertOK(client.performRequest(request)); + } + + private static void indexDocument(final RestClient client, String dataStreamName, String doc) throws IOException { + final Request request = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + request.setJsonEntity(doc); + assertOK(client.performRequest(request)); + } + + private static void rolloverDataStream(final RestClient client, final String dataStreamName) throws IOException { + final Request request = new Request("POST", "/" + dataStreamName + "/_rollover"); + final Response response = client.performRequest(request); + assertOK(response); + assertThat(entityAsMap(response).get("rolled_over"), is(true)); + } + + @SuppressWarnings("unchecked") + private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException { + final Request request = new Request("GET", "_data_stream/" + dataStreamName); + final List dataStreams = (List) entityAsMap(client.performRequest(request)).get("data_streams"); + final Map 
dataStream = (Map<String, Object>) dataStreams.get(0);
+        final List<Map<String, String>> backingIndices = (List<Map<String, String>>) dataStream.get("indices");
+        return backingIndices.get(backingIndex).get("index_name");
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Map<String, Object> getSettings(final RestClient client, final String indexName) throws IOException {
+        final Request request = new Request("GET", "/" + indexName + "/_settings?flat_settings");
+        return ((Map<String, Map<String, Object>>) entityAsMap(client.performRequest(request)).get(indexName)).get("settings");
+    }
+}
diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java
index aa63c2949ac33..1621a235187a1 100644
--- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java
+++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.action.ingest.PutPipelineRequest;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.ingest.common.IngestCommonPlugin;
 import org.elasticsearch.plugins.IngestPlugin;
 import org.elasticsearch.plugins.Plugin;
@@ -99,7 +100,11 @@ public DocumentSizeObserver newDocumentSizeObserver() {
             }
 
             @Override
-            public DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator documentSizeAccumulator) {
+            public DocumentSizeReporter newDocumentSizeReporter(
+                String indexName,
+                IndexMode indexMode,
+                DocumentSizeAccumulator documentSizeAccumulator
+            ) {
                 return DocumentSizeReporter.EMPTY_INSTANCE;
             }
         };
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml
new file mode 100644
index 0000000000000..1e0b90ebb9e0f
--- /dev/null
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/30_synthetic_source.yml
@@ -0,0 +1,49 @@
+setup:
+  - requires:
+      cluster_features: ["mapper.source.synthetic_source_fallback"]
+      reason: introduced in 8.15.0
+
+  - do:
+      indices.create:
+        index: test
+        body:
+          mappings:
+            _source:
+              mode: synthetic
+            properties:
+              pagerank:
+                type: rank_feature
+
+---
+"synthetic source sanity test":
+  - do:
+      index:
+        index: test
+        id: "1"
+        body:
+          pagerank: 10
+
+  - do:
+      index:
+        index: test
+        id: "2"
+        body:
+          pagerank: null
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      get:
+        index: test
+        id: "1"
+
+  - match: { _source.pagerank: 10 }
+
+  - do:
+      get:
+        index: test
+        id: "2"
+
+  - match: { _source.pagerank: null }
+
diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml
new file mode 100644
index 0000000000000..c64e35cc2cea4
--- /dev/null
+++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/20_synthetic_source.yml
@@ -0,0 +1,56 @@
+setup:
+  - requires:
+      cluster_features: ["mapper.source.synthetic_source_fallback"]
+      reason: introduced in 8.15.0
+
+  - do:
+      indices.create:
+        index: test
+        body:
+          mappings:
+            _source:
+              mode: synthetic
+            properties:
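+              # rank_features stores a sparse set of feature/weight pairs per document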
+ tags: + type: rank_features + +--- +"synthetic source sanity test": + - do: + index: + index: test + id: "1" + body: + tags: + foo: 3 + bar: 5 + + - do: + index: + index: test + id: "2" + body: + tags: [] + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: + _source: + tags: + foo: 3 + bar: 5 + + - do: + get: + index: test + id: "2" + + - match: { _source.tags: [] } + + diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index 903192e6ce25b..844478c83e7c7 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -16,7 +16,7 @@ esplugin { restResources { restApi { - include '_common', 'bulk', 'cluster', 'nodes', 'indices', 'index', 'search' + include '_common', 'bulk', 'cluster', 'get', 'nodes', 'indices', 'index', 'search' } } diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml index 4ebc6cf4e9d69..12d0f1bbae6c7 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/60_synthetic_source.yml @@ -1,10 +1,9 @@ -unsupported: +supported: - requires: - cluster_features: ["gte_v8.3.0"] - reason: introduced in 8.3.0 + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 - do: - catch: bad_request indices.create: index: test body: @@ -16,3 +15,42 @@ unsupported: type: join relations: parent: child + + - do: + index: + index: test + id: "1" + body: {"foo": "bar", "join_field": {"name" : "parent"} } + + - do: + index: + index: test + id: "2" + routing: "1" + body: {"zab": "baz", "join_field": { "name" : "child", "parent": "1"} } + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: + _source: + foo: "bar" + join_field: + name: "parent" + + - do: + get: + index: test + id: "2" + + - match: + _source: + join_field: + name: "child" + parent: "1" + zab: "baz" diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index a871056539d38..b9b257a42e051 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -20,7 +20,7 @@ dependencies { restResources { restApi { - include '_common', 'indices', 'index', 'search', 'msearch' + include '_common', 'get', 'indices', 'index', 'search', 'msearch' } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 138007c104d2b..da8d7de27d317 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -101,7 +101,8 @@ public void process(HitContext hit) throws IOException { percolatorLeafReaderContext, slot, leafStoredFields.storedFields(), - Source.fromBytes(document) + Source.fromBytes(document), + null ); processor.process(subContext); for (Map.Entry entry : subContext.hit().getHighlightFields().entrySet()) { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index 82ec63b785e56..1ebf0b4a28ed6 100644 --- 
a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -56,7 +56,7 @@ public void testHitsExecute() throws Exception { LeafReaderContext context = reader.leaves().get(0); // A match: { - HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null), null); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); @@ -87,7 +87,7 @@ public void testHitsExecute() throws Exception { // No match: { - HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null), null); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); @@ -117,7 +117,7 @@ public void testHitsExecute() throws Exception { // No query: { - HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null), null); PercolateQuery.QueryStore queryStore = ctx -> docId -> null; MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); diff --git a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml index 11c2993f4d344..a5576d203314f 100644 --- a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml +++ b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml @@ -126,3 +126,41 @@ document: foo.bar: value - match: { hits.total.value: 1 } + +--- +"Synthetic source": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: queries_index + body: + mappings: + _source: + mode: synthetic + properties: + query: + type: percolator + + - do: + index: + index: queries_index + id: test_percolator + body: + query: + match_all: {} + + - do: + indices.refresh: {} + + - do: + get: + index: queries_index + id: "test_percolator" + + - match: + _source: + query: + match_all: {} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java index 9cf210c2a8aab..cfbd9ad68a317 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; @@ -56,6 +57,7 @@ 
public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler { private static final Logger logger = LogManager.getLogger(Netty4HttpPipeliningHandler.class); private final int maxEventsHeld; + private final ThreadWatchdog.ActivityTracker activityTracker; private final PriorityQueue> outboundHoldingQueue; private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, ChunkedRestResponseBodyPart responseBodyPart) {} @@ -90,31 +92,41 @@ private record ChunkedWrite(PromiseCombiner combiner, ChannelPromise onDone, Chu * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is * required as events cannot queue up indefinitely */ - public Netty4HttpPipeliningHandler(final int maxEventsHeld, final Netty4HttpServerTransport serverTransport) { + public Netty4HttpPipeliningHandler( + final int maxEventsHeld, + final Netty4HttpServerTransport serverTransport, + final ThreadWatchdog.ActivityTracker activityTracker + ) { this.maxEventsHeld = maxEventsHeld; + this.activityTracker = activityTracker; this.outboundHoldingQueue = new PriorityQueue<>(1, Comparator.comparingInt(t -> t.v1().getSequence())); this.serverTransport = serverTransport; } @Override public void channelRead(final ChannelHandlerContext ctx, final Object msg) { - assert msg instanceof FullHttpRequest : "Should have fully aggregated message already but saw [" + msg + "]"; - final FullHttpRequest fullHttpRequest = (FullHttpRequest) msg; - final Netty4HttpRequest netty4HttpRequest; - if (fullHttpRequest.decoderResult().isFailure()) { - final Throwable cause = fullHttpRequest.decoderResult().cause(); - final Exception nonError; - if (cause instanceof Error) { - ExceptionsHelper.maybeDieOnAnotherThread(cause); - nonError = new Exception(cause); + activityTracker.startActivity(); + try { + assert msg instanceof FullHttpRequest : "Should have fully aggregated message already but saw [" + msg + "]"; + final FullHttpRequest fullHttpRequest = (FullHttpRequest) msg; + final Netty4HttpRequest netty4HttpRequest; + if (fullHttpRequest.decoderResult().isFailure()) { + final Throwable cause = fullHttpRequest.decoderResult().cause(); + final Exception nonError; + if (cause instanceof Error) { + ExceptionsHelper.maybeDieOnAnotherThread(cause); + nonError = new Exception(cause); + } else { + nonError = (Exception) cause; + } + netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest, nonError); } else { - nonError = (Exception) cause; + netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest); } - netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest, nonError); - } else { - netty4HttpRequest = new Netty4HttpRequest(readSequence++, fullHttpRequest); + handlePipelinedRequest(ctx, netty4HttpRequest); + } finally { + activityTracker.stopActivity(); } - handlePipelinedRequest(ctx, netty4HttpRequest); } // protected so tests can override it diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 277015e240d57..f48a3143fd016 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -38,6 +38,7 @@ import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -94,6 +95,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private final TLSConfig tlsConfig; private final AcceptChannelHandler.AcceptPredicate acceptChannelPredicate; private final HttpValidator httpValidator; + private final ThreadWatchdog threadWatchdog; private final int readTimeoutMillis; private final int maxCompositeBufferComponents; @@ -130,6 +132,7 @@ public Netty4HttpServerTransport( this.tlsConfig = tlsConfig; this.acceptChannelPredicate = acceptChannelPredicate; this.httpValidator = httpValidator; + this.threadWatchdog = networkService.getThreadWatchdog(); this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); @@ -381,7 +384,15 @@ protected boolean isContentAlwaysEmpty(HttpResponse msg) { if (handlingSettings.compression()) { ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(handlingSettings.compressionLevel())); } - ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.pipeliningMaxEvents, transport)); + ch.pipeline() + .addLast( + "pipelining", + new Netty4HttpPipeliningHandler( + transport.pipeliningMaxEvents, + transport, + transport.threadWatchdog.getActivityTrackerForCurrentThread() + ) + ); transport.serverAcceptedChannel(nettyHttpChannel); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java index 8924bc1924adf..e39a60e0efd58 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageInboundHandler.java @@ -15,6 +15,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; import org.elasticsearch.transport.InboundPipeline; @@ -30,9 +31,16 @@ public class Netty4MessageInboundHandler extends ChannelInboundHandlerAdapter { private final InboundPipeline pipeline; - public Netty4MessageInboundHandler(Netty4Transport transport, InboundPipeline inboundPipeline) { + private final ThreadWatchdog.ActivityTracker activityTracker; + + public Netty4MessageInboundHandler( + Netty4Transport transport, + InboundPipeline inboundPipeline, + ThreadWatchdog.ActivityTracker activityTracker + ) { this.transport = transport; this.pipeline = inboundPipeline; + this.activityTracker = activityTracker; } @Override @@ -44,8 +52,11 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception final ByteBuf buffer = (ByteBuf) msg; Netty4TcpChannel channel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get(); final BytesReference wrapped = Netty4Utils.toBytesReference(buffer); + activityTracker.startActivity(); try (ReleasableBytesReference reference = new ReleasableBytesReference(wrapped, new ByteBufRefCounted(buffer))) { pipeline.handleBytes(channel, reference); + } finally { + 
activityTracker.stopActivity(); } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 6d8f950ef1cf4..d85bf32da263f 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -78,6 +79,8 @@ public class Netty4Transport extends TcpTransport { private volatile SharedGroupFactory.SharedGroup sharedGroup; protected final boolean remoteClusterPortEnabled; + private final ThreadWatchdog threadWatchdog; + public Netty4Transport( Settings settings, TransportVersion version, @@ -92,6 +95,7 @@ public Netty4Transport( Netty4Utils.setAvailableProcessors(EsExecutors.allocatedProcessors(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); this.sharedGroupFactory = sharedGroupFactory; + this.threadWatchdog = networkService.getThreadWatchdog(); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one this.receivePredictorMin = Netty4Plugin.NETTY_RECEIVE_PREDICTOR_MIN.get(settings); @@ -125,6 +129,7 @@ protected void doStart() { bindServer(profileSettings); } } + threadWatchdog.run(settings, threadPool, lifecycle); success = true; } finally { if (success == false) { @@ -354,7 +359,14 @@ private void setupPipeline(Channel ch, boolean isRemoteClusterServerChannel) { pipeline.addLast("logging", ESLoggingHandler.INSTANCE); } pipeline.addLast("chunked_writer", new Netty4WriteThrottlingHandler(getThreadPool().getThreadContext())); - pipeline.addLast("dispatcher", new Netty4MessageInboundHandler(this, getInboundPipeline(ch, isRemoteClusterServerChannel))); + pipeline.addLast( + "dispatcher", + new Netty4MessageInboundHandler( + this, + getInboundPipeline(ch, isRemoteClusterServerChannel), + threadWatchdog.getActivityTrackerForCurrentThread() + ) + ); } protected InboundPipeline getInboundPipeline(Channel ch, boolean isRemoteClusterServerChannel) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 4dca3d17bf072..b2158384fa1cf 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -34,6 +34,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.bytes.ZeroBytesReference; +import org.elasticsearch.common.network.ThreadWatchdog; +import org.elasticsearch.common.network.ThreadWatchdogHelper; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.http.HttpResponse; import org.elasticsearch.rest.ChunkedRestResponseBodyPart; @@ -53,11 +55,14 @@ import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import java.util.stream.IntStream; import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -120,7 +125,7 @@ public void testThatPipeliningWorksWithFastSerializedRequests() throws Interrupt } private EmbeddedChannel makeEmbeddedChannelWithSimulatedWork(int numberOfRequests) { - return new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests, null) { + return new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests, null, new ThreadWatchdog.ActivityTracker()) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); @@ -186,7 +191,9 @@ public void testThatPipeliningClosesConnectionWithTooManyEvents() throws Interru public void testPipeliningRequestsAreReleased() { final int numberOfRequests = 10; - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(numberOfRequests + 1, null)); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + new Netty4HttpPipeliningHandler(numberOfRequests + 1, null, new ThreadWatchdog.ActivityTracker()) + ); for (int i = 0; i < numberOfRequests; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + i)); @@ -473,6 +480,30 @@ public void testPipeliningRequestsAreReleasedAfterFailureOnChunked() { assertThat(messagesSeen.get(1), instanceOf(DefaultHttpContent.class)); } + public void testActivityTracking() { + final var watchdog = new ThreadWatchdog(); + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + final var requestHandled = new AtomicBoolean(); + final var handler = new Netty4HttpPipeliningHandler(Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class), activityTracker) { + @Override + protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { + // thread is not idle while handling the request + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), equalTo(List.of(Thread.currentThread().getName()))); + ctx.fireChannelRead(pipelinedRequest); + assertTrue(requestHandled.compareAndSet(false, true)); + } + }; + + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new ChannelDuplexHandler(), handler); + embeddedChannel.writeInbound(createHttpRequest("/test")); + assertTrue(requestHandled.get()); + + // thread is now idle + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + assertThat(ThreadWatchdogHelper.getStuckThreadNames(watchdog), empty()); + } + // assert that a message of the given number of repeated chunks is found at the given index in the list and each chunk is equal to // the given BytesReference private static void assertChunkedMessageAtIndex(List messagesSeen, int index, int chunks, BytesReference chunkBytes) { @@ -494,7 +525,11 @@ private static void assertDoneWithClosedChannel(ChannelPromise chunkedWritePromi } private Netty4HttpPipeliningHandler getTestHttpHandler() { - return new 
Netty4HttpPipeliningHandler(Integer.MAX_VALUE, mock(Netty4HttpServerTransport.class)) { + return new Netty4HttpPipeliningHandler( + Integer.MAX_VALUE, + mock(Netty4HttpServerTransport.class), + new ThreadWatchdog.ActivityTracker() + ) { @Override protected void handlePipelinedRequest(ChannelHandlerContext ctx, Netty4HttpRequest pipelinedRequest) { ctx.fireChannelRead(pipelinedRequest); diff --git a/muted-tests.yml b/muted-tests.yml index ee7413303de85..d82c823f664be 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -37,20 +37,31 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-async-query-api/line_17} issue: https://github.com/elastic/elasticsearch/issues/109260 -- class: org.elasticsearch.xpack.ccr.LocalIndexFollowingIT - method: testRemoveRemoteConnection - issue: https://github.com/elastic/elasticsearch/issues/109163 -- class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT - method: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/109266 - class: "org.elasticsearch.index.engine.frozen.FrozenIndexIT" issue: "https://github.com/elastic/elasticsearch/issues/109315" method: "testTimestampFieldTypeExposedByAllIndicesServices" - class: "org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" -- class: "org.elasticsearch.upgrades.AggregationsIT" - issue: "https://github.com/elastic/elasticsearch/issues/109322" - method: "testTerms" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ + \ (too complex pattern)}" +- class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" + issue: "https://github.com/elastic/elasticsearch/issues/101598" + method: "testFeatureImportanceValues" +- class: "org.elasticsearch.client.RestClientSingleHostIntegTests" + issue: "https://github.com/elastic/elasticsearch/issues/102717" + method: "testRequestResetAndAbort" +- class: "org.elasticsearch.xpack.deprecation.DeprecationHttpIT" + issue: "https://github.com/elastic/elasticsearch/issues/108628" + method: "testDeprecatedSettingsReturnWarnings" +- class: "org.elasticsearch.xpack.inference.InferenceCrudIT" + issue: "https://github.com/elastic/elasticsearch/issues/109391" + method: "testDeleteEndpointWhileReferencedByPipeline" +- class: org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppendTests + method: testEvaluateBlockWithoutNulls {TestCase=, } + issue: https://github.com/elastic/elasticsearch/issues/109409 +- class: "org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT" + issue: "https://github.com/elastic/elasticsearch/issues/109478" + method: "test {yaml=reference/esql/processing-commands/lookup/line_31}" # Examples: # diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index fcbbad6dd644c..515ab9d5f1822 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip +distributionSha256Sum=f8b4f4772d302c8ff580bc40d0f56e715de69b163546944f787c87abf209c961 
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/settings.gradle b/plugins/examples/settings.gradle index af2596fdbafe3..09abbfa6b5863 100644 --- a/plugins/examples/settings.gradle +++ b/plugins/examples/settings.gradle @@ -7,7 +7,7 @@ */ plugins { - id "com.gradle.enterprise" version "3.16.2" + id "com.gradle.develocity" version "3.17.4" } // Include all subdirectories as example projects diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index 41dba06649ea1..e5e8c5a489d5b 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'knn_search', 'index', 'indices', 'msearch', + include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql' } restTests { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json new file mode 100644 index 0000000000000..b488e19262c2e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_features.json @@ -0,0 +1,38 @@ +{ + "connector.update_features": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-features-api.html", + "description": "Updates the connector features in the connector document." + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_features", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
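The connector.update_features spec registered below describes PUT /_connector/{connector_id}/_features with a required body carrying the features definition. A hypothetical call through the low-level Java REST client; the connector id and the payload shape are invented for illustration and are not taken from this spec:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class UpdateConnectorFeaturesExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // PUT /_connector/{connector_id}/_features with the features object as the body
            Request request = new Request("PUT", "/_connector/my-connector/_features");
            request.setJsonEntity("""
                {
                  "features": {
                    "document_level_security": { "enabled": true }
                  }
                }""");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```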
+ } + } + } + ] + }, + "body": { + "description": "An object containing the connector's features definition.", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json index 262899d5559a1..745136848786c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete.json @@ -1,16 +1,18 @@ { - "inference.delete":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", - "description":"Delete an inference endpoint" + "inference.delete": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html", + "description": "Delete an inference endpoint" }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ] }, - "url":{ - "paths":[ + "url": { + "paths": [ { "path": "/_inference/{inference_id}", "methods": [ @@ -24,22 +26,34 @@ } }, { - "path":"/_inference/{task_type}/{inference_id}", - "methods":[ + "path": "/_inference/{task_type}/{inference_id}", + "methods": [ "DELETE" ], - "parts":{ - "task_type":{ - "type":"string", - "description":"The task type" + "parts": { + "task_type": { + "type": "string", + "description": "The task type" }, - "inference_id":{ - "type":"string", - "description":"The inference Id" + "inference_id": { + "type": "string", + "description": "The inference Id" } } } ] + }, + "params": { + "dry_run": { + "type": "boolean", + "description": "If true the endpoint will not be deleted and a list of ingest processors which reference this endpoint will be returned.", + "required": false + }, + "force": { + "type": "boolean", + "description": "If true the endpoint will be forcefully stopped (regardless of whether or not it is referenced by any ingest processors or semantic text fields).", + "required": false + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json index 09cc8e322f5cb..6339d8a6dee9c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_settings.json @@ -18,6 +18,11 @@ } ] }, - "params":{} + "params":{ + "master_timeout":{ + "type":"time", + "description":"Timeout for connection to master" + } + } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json index fb76ca28f8210..998548408c5db 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.update_settings.json @@ -18,7 +18,16 @@ } ] }, - "params":{}, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Timeout for connection to master" + }, + "timeout":{ + "type":"time", + "description":"Timeout for acknowledgements from all nodes" + } + }, "body":{ "description": "An object with the new settings for each index, if any", "required": true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json 
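The reworked inference.delete spec above adds two optional boolean query parameters, dry_run and force, with the semantics quoted in their descriptions. Through the low-level Java REST client these are plain request parameters; the endpoint id below is a placeholder:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class DeleteInferenceEndpointExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // dry_run=true: nothing is deleted; the response lists the ingest
            // processors that still reference the endpoint.
            Request dryRun = new Request("DELETE", "/_inference/my-endpoint");
            dryRun.addParameter("dry_run", "true");
            System.out.println(client.performRequest(dryRun).getStatusLine());

            // force=true: stop and delete the endpoint even while it is still
            // referenced by ingest processors or semantic text fields.
            Request force = new Request("DELETE", "/_inference/my-endpoint");
            force.addParameter("force", "true");
            System.out.println(client.performRequest(force).getStatusLine());
        }
    }
}
```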
b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json new file mode 100644 index 0000000000000..ca3fde65f6363 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/transform.get_node_stats.json @@ -0,0 +1,23 @@ +{ + "transform.get_node_stats":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html", + "description":"Retrieves transform usage information for transform nodes." + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_transform/_node_stats", + "methods":[ + "GET" + ] + } + ] + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml index fc8df138f94a2..b2b9e1b90cb3b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml @@ -1108,3 +1108,35 @@ flattened field with ignore_above: key7: "key7" - is_false: fields + +--- +completion: + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + completion: + type: completion + + - do: + index: + index: test + id: 1 + refresh: true + body: + completion: "the quick brown fox" + + - do: + get: + index: test + id: 1 + + - match: { _source.completion: "the quick brown fox" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 95075da20fe5e..128903f4faac8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -77,6 +77,10 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logs" } + - do: + indices.get_mapping: + index: test + - match: { test.mappings._source.mode: synthetic } --- using default timestamp field mapping: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml index 44f17e2269027..be1e619d046ac 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml @@ -161,3 +161,43 @@ setup: - match: { hits.hits.0._id: "3" } - match: { hits.hits.1._id: "2" } - match: { hits.hits.2._id: "1" } +--- +"Dynamic dimensions for hex-encoded string": + - requires: + cluster_features: "gte_v8.15.0" + reason: 'hex encoding for byte vectors fixed in 8.15' + + - do: + indices.create: + index: knn_hex_vector_index_dyn_dims + body: + settings: + number_of_shards: 1 + mappings: + properties: + my_vector_byte: + type: dense_vector + index : false + element_type: byte + my_vector_byte_indexed: + type: dense_vector + index: true + element_type: byte + similarity : l2_norm + + # [-128, 127, 10] - is encoded as '807f0a' + - do: + index: + index: 
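The YAML comment above notes that the byte vector [-128, 127, 10] is encoded as the hex string "807f0a", and the assertions that follow expect the mapping to infer dims: 3 from it. The arithmetic is simply that each pair of hex digits decodes to one signed byte; a standalone JDK check of that decoding (plain Java 17, not Elasticsearch code):

```java
import java.util.Arrays;
import java.util.HexFormat;

public class HexVectorDims {
    public static void main(String[] args) {
        // Two hex digits per byte, so dims = string length / 2.
        byte[] vector = HexFormat.of().parseHex("807f0a");
        System.out.println(Arrays.toString(vector)); // prints [-128, 127, 10]
        System.out.println("dims = " + vector.length); // prints dims = 3
    }
}
```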
knn_hex_vector_index_dyn_dims + id: "1" + body: + my_vector_byte: "807f0a" + my_vector_byte_indexed: "807f0a" + + # assert the index is created with 3 dimensions + - do: + indices.get_mapping: + index: knn_hex_vector_index_dyn_dims + + - match: { knn_hex_vector_index_dyn_dims.mappings.properties.my_vector_byte.dims: 3 } + - match: { knn_hex_vector_index_dyn_dims.mappings.properties.my_vector_byte_indexed.dims: 3 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml index fa89a43561764..00cea2e330d81 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/90_sparse_vector.yml @@ -209,3 +209,92 @@ query: exists: field: ml.tokens + +--- +"sparse_vector synthetic source": + + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + ml.tokens: + type: sparse_vector + + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + body: + ml: + tokens: + running: 2.4097164 + good: 2.170997 + run: 2.052153 + race: 1.4575411 + for: 1.1908325 + + - match: { result: "created" } + + - do: + index: + index: test + id: "2" + body: + ml: + tokens: [] + + - match: { result: "created" } + + - do: + index: + index: test + id: "3" + body: + ml: + tokens: {} + + - match: { result: "created" } + + - do: + indices.refresh: { } + + - do: + get: + index: test + id: "1" + + - match: + _source: + ml: + tokens: + running: 2.4097164 + good: 2.170997 + run: 2.052153 + race: 1.4575411 + for: 1.1908325 + + - do: + get: + index: test + id: "2" + + - match: + _source.ml.tokens: [] + + - do: + get: + index: test + id: "3" + + - match: + _source.ml.tokens: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml index bea52c22e151f..f14614a820176 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml @@ -161,3 +161,31 @@ setup: ]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[\\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*)(?:,\\s*( | \".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[\\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:( | \\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[\\t]))*\"(?:(?:\\r\\n)?[\\t/" + +--- +"Prefix length limit": + + - requires: + cluster_features: "gte_v8.15.0" + reason: "Limit for value in prefix query was introduced in 8.15" + + - do: + catch: /The length of prefix \[1110\] used in the Prefix Query request has exceeded the allowed maximum of \[1000\]\. 
This maximum can be set by changing the \[index.max_regex_length\] index level setting\./ + search: + rest_total_hits_as_int: true + index: test_1 + body: + query: + prefix: + foo: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" diff --git a/server/build.gradle b/server/build.gradle index 03713bc3d2837..5831930421c60 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -254,12 +254,12 @@ tasks.named("thirdPartyAudit").configure { tasks.named("dependencyLicenses").configure { mapping from: /lucene-.*/, to: 'lucene' mapping from: /log4j-.*/, to: 'log4j' - dependencies = project.configurations.runtimeClasspath.fileCollection { - it.group.startsWith('org.elasticsearch') == false || - // keep the following org.elasticsearch jars in - (it.name == 'jna' || - it.name == 'securesm') - } + + configureDependencies( + project.configurations.runtimeClasspath, project.configurations.resolveableCompileOnly, identifier -> { + return identifier instanceof ModuleComponentIdentifier + (identifier.moduleIdentifier.name == 'jna' || identifier.moduleIdentifier.name == 'securesm') + }) } tasks.named("licenseHeaders").configure { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 26b33acfcbe98..897f10b031dcb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -100,10 +100,10 @@ public void testUnassignedPrimaryWithExistingIndex() throws Exception { // verify unassigned info assertNotNull(unassignedInfo); - assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason()); + assertEquals(Reason.NODE_LEFT, unassignedInfo.reason()); assertTrue( - unassignedInfo.getLastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA - || unassignedInfo.getLastAllocationStatus() == AllocationStatus.NO_VALID_SHARD_COPY + unassignedInfo.lastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA + || unassignedInfo.lastAllocationStatus() == AllocationStatus.NO_VALID_SHARD_COPY ); // verify cluster info @@ -190,8 +190,8 @@ public void testUnassignedReplicaDelayedAllocation() throws Exception { // verify unassigned info assertNotNull(unassignedInfo); - 
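The 30_limits.yml case above sends a prefix value of 1110 characters against the default index.max_regex_length of 1000 and expects the quoted error. The guard under test presumably amounts to a length check before the prefix query is built; schematically (illustrative, not the actual PrefixQueryBuilder code, with the message text taken from the test's expected error):

```java
public class PrefixLengthGuard {

    // Reject over-long prefix values up front, mirroring the error the YAML test expects.
    static void checkPrefixLength(String value, int maxRegexLength) {
        if (value.length() > maxRegexLength) {
            throw new IllegalArgumentException(
                "The length of prefix ["
                    + value.length()
                    + "] used in the Prefix Query request has exceeded the allowed maximum of ["
                    + maxRegexLength
                    + "]. This maximum can be set by changing the [index.max_regex_length] index level setting."
            );
        }
    }

    public static void main(String[] args) {
        try {
            checkPrefixLength("a".repeat(1110), 1000);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // the message the YAML test's regex matches
        }
    }
}
```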
assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason()); - assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.getLastAllocationStatus()); + assertEquals(Reason.NODE_LEFT, unassignedInfo.reason()); + assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.lastAllocationStatus()); // verify cluster info verifyClusterInfo(clusterInfo, includeDiskInfo, 2); @@ -320,8 +320,8 @@ public void testUnassignedReplicaWithPriorCopy() throws Exception { // verify unassigned info assertNotNull(unassignedInfo); - assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason()); - assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.getLastAllocationStatus()); + assertEquals(Reason.NODE_LEFT, unassignedInfo.reason()); + assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.lastAllocationStatus()); // verify cluster info verifyClusterInfo(clusterInfo, includeDiskInfo, 3); @@ -432,8 +432,8 @@ public void testAllocationFilteringOnIndexCreation() throws Exception { // verify unassigned info assertNotNull(unassignedInfo); - assertEquals(Reason.INDEX_CREATED, unassignedInfo.getReason()); - assertEquals(AllocationStatus.DECIDERS_NO, unassignedInfo.getLastAllocationStatus()); + assertEquals(Reason.INDEX_CREATED, unassignedInfo.reason()); + assertEquals(AllocationStatus.DECIDERS_NO, unassignedInfo.lastAllocationStatus()); // verify cluster info verifyClusterInfo(clusterInfo, includeDiskInfo, 2); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index aa4fee3a3f94d..61e5c1bfcc811 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -386,9 +386,9 @@ public void testCreateShrinkIndexFails() throws Exception { assertTrue(routingTables.index("target").shard(0).shard(0).unassigned()); assertEquals( UnassignedInfo.Reason.ALLOCATION_FAILED, - routingTables.index("target").shard(0).shard(0).unassignedInfo().getReason() + routingTables.index("target").shard(0).shard(0).unassignedInfo().reason() ); - assertEquals(1, routingTables.index("target").shard(0).shard(0).unassignedInfo().getNumFailedAllocations()); + assertEquals(1, routingTables.index("target").shard(0).shard(0).unassignedInfo().failedAllocations()); }); // now relocate them all to the right node updateIndexSettings(Settings.builder().put("index.routing.allocation.require._name", mergeNode), "source"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java new file mode 100644 index 0000000000000..a5e445270ccc4 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommandIT.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */
+package org.elasticsearch.cluster.coordination;
+
+import joptsimple.OptionSet;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.cli.MockTerminal;
+import org.elasticsearch.cli.ProcessInfo;
+import org.elasticsearch.cli.UserException;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false)
+public class RemoveIndexSettingsCommandIT extends ESIntegTestCase {
+
+    static final Setting<Integer> FOO = Setting.intSetting("index.foo", 1, Setting.Property.IndexScope, Setting.Property.Dynamic);
+    static final Setting<Integer> BAR = Setting.intSetting("index.bar", 2, Setting.Property.IndexScope, Setting.Property.Final);
+
+    public static class ExtraSettingsPlugin extends Plugin {
+        @Override
+        public List<Setting<?>> getSettings() {
+            return List.of(FOO, BAR);
+        }
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return CollectionUtils.appendToCopy(super.nodePlugins(), ExtraSettingsPlugin.class);
+    }
+
+    public void testRemoveSettingsAbortedByUser() throws Exception {
+        internalCluster().setBootstrapMasterNodeIndex(0);
+        var node = internalCluster().startNode();
+        createIndex("test-index", Settings.builder().put(FOO.getKey(), 101).put(BAR.getKey(), 102).build());
+        ensureYellow("test-index");
+        Settings dataPathSettings = internalCluster().dataPathSettings(node);
+        ensureStableCluster(1);
+        internalCluster().stopRandomDataNode();
+
+        Settings nodeSettings = Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build();
+        ElasticsearchException error = expectThrows(
+            ElasticsearchException.class,
+            () -> removeIndexSettings(TestEnvironment.newEnvironment(nodeSettings), true, "index.foo")
+        );
+        assertThat(error.getMessage(), equalTo(ElasticsearchNodeCommand.ABORTED_BY_USER_MSG));
+        internalCluster().startNode(nodeSettings);
+    }
+
+    public void testRemoveSettingsSuccessful() throws Exception {
+        internalCluster().setBootstrapMasterNodeIndex(0);
+        var node = internalCluster().startNode();
+        Settings dataPathSettings = internalCluster().dataPathSettings(node);
+
+        int numIndices = randomIntBetween(1, 10);
+        int[] barValues = new int[numIndices];
+        for (int i = 0; i < numIndices; i++) {
+            String index = "test-index-" + i;
+            barValues[i] = between(1, 1000);
+            createIndex(index, Settings.builder().put(FOO.getKey(), between(1, 1000)).put(BAR.getKey(), barValues[i]).build());
+        }
+        int moreIndices = randomIntBetween(1, 10);
+        for (int i = 0; i < moreIndices; i++) {
+            createIndex("more-index-" + i, Settings.EMPTY);
+        }
+        internalCluster().stopNode(node);
+
+        Environment environment = TestEnvironment.newEnvironment(
+            Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build()
+        );
+
+        MockTerminal terminal = removeIndexSettings(environment, false, "index.foo");
+        assertThat(terminal.getOutput(), containsString(RemoveIndexSettingsCommand.SETTINGS_REMOVED_MSG));
+        for (int i = 0; i < numIndices; i++) {
+            assertThat(terminal.getOutput(), containsString("Index setting [index.foo] will be removed from index [[test-index-" + i));
+        }
+        for (int i = 0; i < moreIndices; i++) {
+            assertThat(terminal.getOutput(), not(containsString("Index setting [index.foo] will be removed from index [[more-index-" + i)));
+        }
+        Settings nodeSettings = Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build();
+        internalCluster().startNode(nodeSettings);
+
+        Map<String, Settings> getIndexSettings = client().admin().indices().prepareGetSettings("test-index-*").get().getIndexToSettings();
+        for (int i = 0; i < numIndices; i++) {
+            String index = "test-index-" + i;
+            Settings indexSettings = getIndexSettings.get(index);
+            assertFalse(indexSettings.hasValue("index.foo"));
+            assertThat(indexSettings.get("index.bar"), equalTo(Integer.toString(barValues[i])));
+        }
+        getIndexSettings = client().admin().indices().prepareGetSettings("more-index-*").get().getIndexToSettings();
+        for (int i = 0; i < moreIndices; i++) {
+            assertNotNull(getIndexSettings.get("more-index-" + i));
+        }
+    }
+
+    public void testSettingDoesNotMatch() throws Exception {
+        internalCluster().setBootstrapMasterNodeIndex(0);
+        var node = internalCluster().startNode();
+        createIndex("test-index", Settings.builder().put(FOO.getKey(), 101).put(BAR.getKey(), 102).build());
+        ensureYellow("test-index");
+        Settings dataPathSettings = internalCluster().dataPathSettings(node);
+        ensureStableCluster(1);
+        internalCluster().stopRandomDataNode();
+
+        Settings nodeSettings = Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build();
+        UserException error = expectThrows(
+            UserException.class,
+            () -> removeIndexSettings(TestEnvironment.newEnvironment(nodeSettings), true, "index.not_foo")
+        );
+        assertThat(error.getMessage(), containsString("No index setting matching [index.not_foo] were found on this node"));
+        internalCluster().startNode(nodeSettings);
+    }
+
+    private MockTerminal executeCommand(ElasticsearchNodeCommand command, Environment environment, boolean abort, String... args)
+        throws Exception {
+        final MockTerminal terminal = MockTerminal.create();
+        final OptionSet options = command.getParser().parse(args);
+        final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
+        final String input;
+
+        if (abort) {
+            input = randomValueOtherThanMany(c -> c.equalsIgnoreCase("y"), () -> randomAlphaOfLength(1));
+        } else {
+            input = randomBoolean() ? "y" : "Y";
+        }
+
+        terminal.addTextInput(input);
+
+        try {
+            command.execute(terminal, options, environment, processInfo);
+        } finally {
+            assertThat(terminal.getOutput(), containsString(ElasticsearchNodeCommand.STOP_WARNING_MSG));
+        }
+
+        return terminal;
+    }
+
+    private MockTerminal removeIndexSettings(Environment environment, boolean abort, String...
args) throws Exception { + final MockTerminal terminal = executeCommand(new RemoveIndexSettingsCommand(), environment, abort, args); + assertThat(terminal.getOutput(), containsString(RemoveIndexSettingsCommand.CONFIRMATION_MSG)); + assertThat(terminal.getOutput(), containsString(RemoveIndexSettingsCommand.SETTINGS_REMOVED_MSG)); + return terminal; + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/AllocationIdIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/AllocationIdIT.java index 784a6e8f419c8..a25de555ce267 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/AllocationIdIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/AllocationIdIT.java @@ -113,7 +113,7 @@ public void testFailedRecoveryOnAllocateStalePrimaryRequiresAnotherAllocateStale final ClusterState state = clusterAdmin().prepareState().get().getState(); final ShardRouting shardRouting = state.routingTable().index(indexName).shard(shardId.id()).primaryShard(); assertThat(shardRouting.state(), equalTo(ShardRoutingState.UNASSIGNED)); - assertThat(shardRouting.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + assertThat(shardRouting.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); }); internalCluster().stopNode(node1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index e7a7a6f2ba727..72594fef8c6ee 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -202,7 +202,7 @@ public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exce .shard(0) .primaryShard() .unassignedInfo() - .getReason(), + .reason(), equalTo(UnassignedInfo.Reason.NODE_LEFT) ); @@ -227,7 +227,7 @@ public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exce .shard(0) .primaryShard() .unassignedInfo() - .getReason(), + .reason(), equalTo(UnassignedInfo.Reason.NODE_LEFT) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java index 006c9e2394f3c..76311387115d2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java @@ -114,7 +114,7 @@ public Collection createAllocationDeciders(Settings settings, @Override public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { // once a primary is cancelled it _stays_ cancelled - if (shardRouting.unassignedInfo().getReason() == UnassignedInfo.Reason.REROUTE_CANCELLED) { + if (shardRouting.unassignedInfo().reason() == UnassignedInfo.Reason.REROUTE_CANCELLED) { return Decision.NO; } return super.canForceAllocatePrimary(shardRouting, node, allocation); @@ -450,7 +450,7 @@ public AllocationCommand getCancelPrimaryCommand() { shardRouting.role().isPromotableToPrimary() ? 
UnassignedInfo.Reason.REROUTE_CANCELLED : UnassignedInfo.Reason.UNPROMOTABLE_REPLICA, - shardRouting.unassignedInfo().getReason() + shardRouting.unassignedInfo().reason() ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java new file mode 100644 index 0000000000000..4bd56e2276d18 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/common/network/ThreadWatchdogIT.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.core.LogEvent; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class ThreadWatchdogIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), "100ms") + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.getKey(), "0") + .build(); + } + + @SuppressWarnings("unchecked") + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopyNoNullElements( + super.nodePlugins(), + SlowRequestProcessingPlugin.class, + MockTransportService.TestPlugin.class + ); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public static class 
SlowRequestProcessingPlugin extends Plugin implements ActionPlugin { + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new RestHandler() { + @Override + public List routes() { + return List.of(Route.builder(RestRequest.Method.POST, "_slow").build()); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + blockAndWaitForWatchdogLogs(); + new RestToXContentListener<>(channel).onResponse((b, p) -> b.startObject().endObject()); + } + }); + } + } + + private static void blockAndWaitForWatchdogLogs() { + final var threadName = Thread.currentThread().getName(); + final var logsSeenLatch = new CountDownLatch(2); + final var warningSeen = new RunOnce(logsSeenLatch::countDown); + final var threadDumpSeen = new RunOnce(logsSeenLatch::countDown); + MockLog.assertThatLogger(() -> safeAwait(logsSeenLatch), ThreadWatchdog.class, new MockLog.LoggingExpectation() { + @Override + public void match(LogEvent event) { + final var formattedMessage = event.getMessage().getFormattedMessage(); + if (formattedMessage.contains("the following threads are active but did not make progress in the preceding [100ms]:") + && formattedMessage.contains(threadName)) { + warningSeen.run(); + } + if (formattedMessage.contains("hot threads dump due to active threads not making progress")) { + threadDumpSeen.run(); + } + } + + @Override + public void assertMatched() {} + }); + } + + public void testThreadWatchdogHttpLogging() throws IOException { + ESRestTestCase.assertOK(getRestClient().performRequest(new Request("POST", "_slow"))); + } + + public void testThreadWatchdogTransportLogging() { + internalCluster().ensureAtLeastNumDataNodes(2); + final var transportServiceIterator = internalCluster().getInstances(TransportService.class).iterator(); + final var sourceTransportService = transportServiceIterator.next(); + final var targetTransportService = transportServiceIterator.next(); + + targetTransportService.registerRequestHandler( + "internal:slow", + EsExecutors.DIRECT_EXECUTOR_SERVICE, + TransportRequest.Empty::new, + (request, channel, task) -> { + blockAndWaitForWatchdogLogs(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + ); + + safeAwait( + SubscribableListener.newForked( + l -> sourceTransportService.sendRequest( + targetTransportService.getLocalNode(), + "internal:slow", + new TransportRequest.Empty(), + new ActionListenerResponseHandler( + l, + in -> TransportResponse.Empty.INSTANCE, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ) + ) + ); + } + +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index d1827bf49410f..e05bda69d2c9c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -403,9 +403,9 @@ public void testRecoverBrokenIndexMetadata() throws Exception { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals( UnassignedInfo.AllocationStatus.DECIDERS_NO, - 
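ThreadWatchdogIT above lowers the watchdog interval to 100ms and removes the quiet time so that a deliberately blocked handler produces the two log lines the test waits for: a warning naming the unresponsive threads and a follow-up hot-threads dump. A sketch of that reporting loop, reusing the stuck-name check from the earlier sketch; this is illustrative scheduling, not the actual ThreadWatchdog code, and the message strings are copied from the expectations in blockAndWaitForWatchdogLogs():

```java
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

final class WatchdogLoopSketch {

    // Periodically report threads that made no progress over the last interval,
    // mirroring the two messages the test above matches in the logs.
    static ScheduledExecutorService start(Supplier<List<String>> stuckThreadNames, long intervalMillis) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(() -> {
            List<String> stuck = stuckThreadNames.get();
            if (stuck.isEmpty() == false) {
                System.err.printf(
                    "the following threads are active but did not make progress in the preceding [%sms]: %s%n",
                    intervalMillis,
                    stuck
                );
                System.err.println("hot threads dump due to active threads not making progress ...");
            }
        }, intervalMillis, intervalMillis, TimeUnit.MILLISECONDS);
        return scheduler;
    }
}
```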
shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus() + shardRoutingTable.primaryShard().unassignedInfo().lastAllocationStatus() ); - assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); + assertThat(shardRoutingTable.primaryShard().unassignedInfo().failedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); indicesAdmin().prepareClose("test").get(); @@ -472,9 +472,9 @@ public void testRecoverMissingAnalyzer() throws Exception { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals( UnassignedInfo.AllocationStatus.DECIDERS_NO, - shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus() + shardRoutingTable.primaryShard().unassignedInfo().lastAllocationStatus() ); - assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); + assertThat(shardRoutingTable.primaryShard().unassignedInfo().failedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); indicesAdmin().prepareClose("test").get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index f43aaf0bacad4..ef4616fdd0b40 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -311,8 +311,8 @@ public Settings onNodeStopped(String nodeName) throws Exception { // all shards should be failed due to a corrupted translog assertBusy(() -> { final UnassignedInfo unassignedInfo = getClusterAllocationExplanation(client(), indexName, 0, true).getUnassignedInfo(); - assertThat(unassignedInfo.getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); - assertThat(ExceptionsHelper.unwrap(unassignedInfo.getFailure(), TranslogCorruptedException.class), not(nullValue())); + assertThat(unassignedInfo.reason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + assertThat(ExceptionsHelper.unwrap(unassignedInfo.failure(), TranslogCorruptedException.class), not(nullValue())); }); // have to shut down primary node - otherwise node lock is present diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index a9d19473164bf..7e3df8d8e1cbc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -465,7 +465,7 @@ public void onTimeout(TimeValue timeout) { final var replicaShards = indexRoutingTable.shard(shardId).replicaShards(); if (replicaShards.isEmpty() || replicaShards.stream() - .anyMatch(sr -> sr.unassigned() == false || sr.unassignedInfo().getNumFailedAllocations() < maxRetries)) { + .anyMatch(sr -> sr.unassigned() == false || sr.unassignedInfo().failedAllocations() < maxRetries)) { return false; } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index ac5a10d246cfc..0c0ece4bf5227 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -83,8 +83,8 @@ public void onAllNodesStopped() throws Exception { final var description = Strings.toString(allocationExplainResponse); final var unassignedInfo = allocationExplainResponse.getUnassignedInfo(); assertThat(description, unassignedInfo, not(nullValue())); - assertThat(description, unassignedInfo.getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); - var failure = unassignedInfo.getFailure(); + assertThat(description, unassignedInfo.reason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + var failure = unassignedInfo.failure(); assertNotNull(failure); final Throwable cause = ExceptionsHelper.unwrap(failure, TranslogCorruptedException.class); if (cause != null) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java index 59e7a67687921..874ba7b42690c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ShardLockFailureIT.java @@ -61,7 +61,7 @@ public void testShardLockFailure() throws Exception { .routingTable() .shardRoutingTable(shardId) .allShards() - .noneMatch(sr -> sr.unassigned() && sr.unassignedInfo().getNumFailedAllocations() > 0) + .noneMatch(sr -> sr.unassigned() && sr.unassignedInfo().failedAllocations() > 0) ); } catch (IndexNotFoundException e) { // ok diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java index 69f30720533d4..bf6c59a4c0a9b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.plugins.internal; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngine; @@ -103,6 +104,7 @@ public IndexResult index(Index index) throws IOException { DocumentSizeReporter documentParsingReporter = documentParsingProvider.newDocumentSizeReporter( shardId.getIndexName(), + IndexMode.STANDARD, DocumentSizeAccumulator.EMPTY_INSTANCE ); documentParsingReporter.onIndexingCompleted(index.parsedDoc()); @@ -132,7 +134,11 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator documentSizeAccumulator) { + public DocumentSizeReporter newDocumentSizeReporter( + String indexName, + IndexMode indexMode, + DocumentSizeAccumulator documentSizeAccumulator + ) { return new TestDocumentSizeReporter(indexName); } }; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index 09633a0ea1b3f..ae3347dafd55d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ 
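The steady stream of call-site renames running through these hunks (getReason() to reason(), getLastAllocationStatus() to lastAllocationStatus(), getNumFailedAllocations() to failedAllocations(), getFailure() to failure()) matches UnassignedInfo being turned into a Java record, whose generated accessors carry the bare component name; the record declaration itself is not part of this excerpt. The shape, reduced to the components visible here:

```java
enum Reason { NODE_LEFT, INDEX_CREATED, ALLOCATION_FAILED, REROUTE_CANCELLED, UNPROMOTABLE_REPLICA }
enum AllocationStatus { NO_ATTEMPT, DECIDERS_NO, FETCHING_SHARD_DATA, NO_VALID_SHARD_COPY }

// A record generates reason(), failedAllocations(), etc. rather than getReason(),
// which is exactly the mechanical rename applied across the tests above.
record UnassignedInfoShape(Reason reason, AllocationStatus lastAllocationStatus, int failedAllocations, Exception failure) {

    public static void main(String[] args) {
        var info = new UnassignedInfoShape(Reason.NODE_LEFT, AllocationStatus.NO_ATTEMPT, 0, null);
        System.out.println(info.reason());            // was info.getReason()
        System.out.println(info.failedAllocations()); // was info.getNumFailedAllocations()
    }
}
```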
b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -25,8 +25,8 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -187,6 +187,14 @@ protected Collection> nodePlugins() { return List.of(TestMapperPlugin.class, ExceptionOnRewriteQueryPlugin.class, BlockingOnRewriteQueryPlugin.class); } + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) + .build(); + } + @Override protected boolean addMockHttpTransport() { return false; // enable http @@ -529,23 +537,31 @@ private void moveOrCloseShardsOnNodes(String nodeName) throws Exception { closeShardNoCheck(indexShard, randomBoolean()); } else if (randomBoolean()) { final ShardId shardId = indexShard.shardId(); - final String[] nodeNames = internalCluster().getNodeNames(); - final String newNodeName = randomValueOtherThanMany(n -> nodeName.equals(n) == false, () -> randomFrom(nodeNames)); - DiscoveryNode fromNode = null; - DiscoveryNode toNode = null; - for (DiscoveryNode node : clusterService().state().nodes()) { - if (node.getName().equals(nodeName)) { - fromNode = node; - } - if (node.getName().equals(newNodeName)) { - toNode = node; + + final var targetNodes = new ArrayList(); + for (final var targetIndicesService : internalCluster().getInstances(IndicesService.class)) { + final var targetNode = targetIndicesService.clusterService().localNode(); + if (targetNode.canContainData() && targetIndicesService.getShardOrNull(shardId) == null) { + targetNodes.add(targetNode.getId()); } } - assertNotNull(fromNode); - assertNotNull(toNode); - clusterAdmin().prepareReroute() - .add(new MoveAllocationCommand(shardId.getIndexName(), shardId.id(), fromNode.getId(), toNode.getId())) - .get(); + + if (targetNodes.isEmpty()) { + continue; + } + + safeGet( + clusterAdmin().prepareReroute() + .add( + new MoveAllocationCommand( + shardId.getIndexName(), + shardId.id(), + indicesService.clusterService().localNode().getId(), + randomFrom(targetNodes) + ) + ) + .execute() + ); } } } @@ -570,7 +586,7 @@ public void testRelocation() throws Exception { if (randomBoolean()) { request.indexFilter(QueryBuilders.rangeQuery("timestamp").gte("2020-01-01")); } - final FieldCapabilitiesResponse response = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet(); + final FieldCapabilitiesResponse response = safeGet(client().execute(TransportFieldCapabilitiesAction.TYPE, request)); assertThat(response.getIndices(), arrayContainingInAnyOrder("log-index-1", "log-index-2")); assertThat(response.getField("field1"), aMapWithSize(2)); assertThat(response.getField("field1"), hasKey("long")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java new file mode 
100644 index 0000000000000..dd002d3f44493 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/FieldBasedRerankerIT.java @@ -0,0 +1,817 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.rank; + +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchPhaseController; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureShardResult; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasRank; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(minNumDataNodes = 3) +public class 
FieldBasedRerankerIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(FieldBasedRerankerPlugin.class); + } + + public void testFieldBasedReranker() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + assertNoFailuresAndResponse( + prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder(new FieldBasedRankBuilder(rankWindowSize, rankFeatureField)) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10), + response -> { + assertHitCount(response, 5L); + int rank = 1; + for (SearchHit searchHit : response.getHits().getHits()) { + assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); + assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertThat(searchHit, hasRank(rank)); + assertNotNull(searchHit.getFields().get(searchField)); + rank++; + } + } + ); + assertNoOpenContext(indexName); + } + + public void testFieldBasedRerankerPagination() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + assertResponse( + prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder(new FieldBasedRankBuilder(rankWindowSize, rankFeatureField)) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(2) + .setFrom(2), + response -> { + assertHitCount(response, 5L); + int rank = 3; + for (SearchHit searchHit : response.getHits().getHits()) { + assertThat(searchHit, 
hasId(String.valueOf(5 - (rank - 1)))); + assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertThat(searchHit, hasRank(rank)); + assertNotNull(searchHit.getFields().get(searchField)); + rank++; + } + } + ); + assertNoOpenContext(indexName); + } + + public void testFieldBasedRerankerPaginationOutsideOfBounds() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + assertNoFailuresAndResponse( + prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder(new FieldBasedRankBuilder(rankWindowSize, rankFeatureField)) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(2) + .setFrom(10), + response -> { + assertHitCount(response, 5L); + assertEquals(0, response.getHits().getHits().length); + } + ); + assertNoOpenContext(indexName); + } + + public void testNotAllShardsArePresentInFetchPhase() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 10).build()); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A").setRouting("A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B").setRouting("B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C").setRouting("C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D").setRouting("C"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E").setRouting("C") + ); + + assertNoFailuresAndResponse( + prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(0.1f)) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(0.3f)) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(0.3f)) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(0.3f)) + ) + .setRankBuilder(new FieldBasedRankBuilder(rankWindowSize, rankFeatureField)) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(2), + response -> { + assertHitCount(response, 4L); + assertEquals(2, response.getHits().getHits().length); + int rank = 1; + for (SearchHit searchHit : response.getHits().getHits()) { + assertThat(searchHit, hasId(String.valueOf(5 - 
(rank - 1)))); + assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertThat(searchHit, hasRank(rank)); + assertNotNull(searchHit.getFields().get(searchField)); + rank++; + } + } + ); + assertNoOpenContext(indexName); + } + + public void testFieldBasedRerankerNoMatchingDocs() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + assertNoFailuresAndResponse( + prepareSearch().setQuery(boolQuery().should(constantScoreQuery(matchQuery(searchField, "F")).boost(randomFloat()))) + .setRankBuilder(new FieldBasedRankBuilder(rankWindowSize, rankFeatureField)) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10), + response -> { + assertHitCount(response, 0L); + } + ); + assertNoOpenContext(indexName); + } + + public void testQueryPhaseShardThrowingRankBuilderAllContextsAreClosedAllShardsFail() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + // this test is irrespective of the number of shards, as we will always reach QueryPhaseRankShardContext#combineQueryPhaseResults + // even with no results. 
So, when we get back to the coordinator, all shards will have failed, and the whole response + // will be marked as a failure + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + expectThrows( + SearchPhaseExecutionException.class, + () -> prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder( + new ThrowingRankBuilder( + rankWindowSize, + rankFeatureField, + ThrowingRankBuilder.ThrowingRankBuilderType.THROWING_QUERY_PHASE_SHARD_CONTEXT.name() + ) + ) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10) + .get() + ); + assertNoOpenContext(indexName); + } + + public void testQueryPhaseCoordinatorThrowingRankBuilderAllContextsAreClosedAllShardsFail() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + // when we throw on the coordinator, the onPhaseFailure handler will be invoked, which in turn will mark the whole + // search request as a failure (i.e. 
no partial results) + expectThrows( + SearchPhaseExecutionException.class, + () -> prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder( + new ThrowingRankBuilder( + rankWindowSize, + rankFeatureField, + ThrowingRankBuilder.ThrowingRankBuilderType.THROWING_QUERY_PHASE_COORDINATOR_CONTEXT.name() + ) + ) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10) + .get() + ); + assertNoOpenContext(indexName); + } + + public void testRankFeaturePhaseShardThrowingRankBuilderAllContextsAreClosedPartialFailures() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 10).build()); + indexRandom( + true, + prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + // we have 10 shards and 5 documents, so when the exception is thrown we know that not all shards will report failures + assertResponse( + prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder( + new ThrowingRankBuilder( + rankWindowSize, + rankFeatureField, + ThrowingRankBuilder.ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_SHARD_CONTEXT.name() + ) + ) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10), + response -> { + assertTrue(response.getFailedShards() > 0); + assertTrue( + Arrays.stream(response.getShardFailures()) + .allMatch(failure -> failure.getCause().getMessage().contains("rfs - simulated failure")) + ); + assertHitCount(response, 5); + assertTrue(response.getHits().getHits().length == 0); + } + ); + assertNoOpenContext(indexName); + } + + public void testRankFeaturePhaseShardThrowingRankBuilderAllContextsAreClosedAllShardsFail() throws Exception { + final String indexName = "test_index"; + final String rankFeatureField = "rankFeatureField"; + final String searchField = "searchField"; + final int rankWindowSize = 10; + + // we have 1 shard and 5 documents, so when the exception is thrown we know that all shards will have failed + createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); + indexRandom( + true, + 
prepareIndex(indexName).setId("1").setSource(rankFeatureField, 0.1, searchField, "A"), + prepareIndex(indexName).setId("2").setSource(rankFeatureField, 0.2, searchField, "B"), + prepareIndex(indexName).setId("3").setSource(rankFeatureField, 0.3, searchField, "C"), + prepareIndex(indexName).setId("4").setSource(rankFeatureField, 0.4, searchField, "D"), + prepareIndex(indexName).setId("5").setSource(rankFeatureField, 0.5, searchField, "E") + ); + + expectThrows( + SearchPhaseExecutionException.class, + () -> prepareSearch().setQuery( + boolQuery().should(constantScoreQuery(matchQuery(searchField, "A")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "B")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "C")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "D")).boost(randomFloat())) + .should(constantScoreQuery(matchQuery(searchField, "E")).boost(randomFloat())) + ) + .setRankBuilder( + new ThrowingRankBuilder( + rankWindowSize, + rankFeatureField, + ThrowingRankBuilder.ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_COORDINATOR_CONTEXT.name() + ) + ) + .addFetchField(searchField) + .setTrackTotalHits(true) + .setAllowPartialSearchResults(true) + .setSize(10) + .get() + ); + assertNoOpenContext(indexName); + } + + private void assertNoOpenContext(final String indexName) throws Exception { + assertBusy( + () -> assertThat(indicesAdmin().prepareStats(indexName).get().getTotal().getSearch().getOpenContexts(), equalTo(0L)), + 1, + TimeUnit.SECONDS + ); + } + + public static class FieldBasedRankBuilder extends RankBuilder { + + public static final ParseField FIELD_FIELD = new ParseField("field"); + static final ConstructingObjectParser<FieldBasedRankBuilder, Void> PARSER = new ConstructingObjectParser<>( + "field-based-rank", + args -> {
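+ // rank_window_size is optional and falls back to DEFAULT_RANK_WINDOW_SIZE (see the declare calls below); the feature field is required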
+ int rankWindowSize = args[0] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[0]; + String field = (String) args[1]; + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("Field cannot be null or empty"); + } + return new FieldBasedRankBuilder(rankWindowSize, field); + } + ); + + static { + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + PARSER.declareString(constructorArg(), FIELD_FIELD); + } + + protected final String field; + + public static FieldBasedRankBuilder fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public FieldBasedRankBuilder(final int rankWindowSize, final String field) { + super(rankWindowSize); + this.field = field; + } + + public FieldBasedRankBuilder(StreamInput in) throws IOException { + super(in); + this.field = in.readString(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(field); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(FIELD_FIELD.getPreferredName(), field); + } + + @Override + public boolean isCompoundBuilder() { + return false; + } + + @Override + public Explanation explainHit(Explanation baseExplanation, RankDoc scoreDoc, List<String> queryNames) { + return baseExplanation; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List<Query> queries, int from) { + return new QueryPhaseRankShardContext(queries, rankWindowSize()) { + @Override + public RankShardResult combineQueryPhaseResults(List<TopDocs> rankResults) { + Map<Integer, RankFeatureDoc> rankDocs = new HashMap<>(); + rankResults.forEach(topDocs -> { + for (ScoreDoc scoreDoc : topDocs.scoreDocs) { + rankDocs.compute(scoreDoc.doc, (key, value) -> { + if (value == null) { + return new RankFeatureDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex); + } else { + value.score = Math.max(scoreDoc.score, rankDocs.get(scoreDoc.doc).score); + return value; + } + }); + } + }); + RankFeatureDoc[] sortedResults = rankDocs.values().toArray(RankFeatureDoc[]::new); + Arrays.sort(sortedResults, (o1, o2) -> Float.compare(o2.score, o1.score)); + return new RankFeatureShardResult(sortedResults); + } + }; + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return new QueryPhaseRankCoordinatorContext(rankWindowSize()) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List<QuerySearchResult> querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + List<RankFeatureDoc> rankDocs = new ArrayList<>(); + for (int i = 0; i < querySearchResults.size(); i++) { + QuerySearchResult querySearchResult = querySearchResults.get(i); + RankFeatureShardResult shardResult = (RankFeatureShardResult) querySearchResult.getRankShardResult(); + for (RankFeatureDoc frd : shardResult.rankFeatureDocs) { + frd.shardIndex = i; + rankDocs.add(frd); + } + } + // no support for sort field atm + // should pass needed info to make use of org.elasticsearch.action.search.SearchPhaseController.sortDocs?
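+ // sort the collected docs by score, descending, and keep only the top rank_window_size entries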
+ rankDocs.sort(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()); + RankFeatureDoc[] topResults = rankDocs.stream().limit(rankWindowSize).toArray(RankFeatureDoc[]::new); + + assert topDocStats.fetchHits == 0; + topDocStats.fetchHits = topResults.length; + + return topResults; + } + }; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new RankFeaturePhaseRankShardContext(field) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + try { + RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + rankFeatureDocs[i] = new RankFeatureDoc(hits.getHits()[i].docId(), hits.getHits()[i].getScore(), shardId); + rankFeatureDocs[i].featureData(hits.getHits()[i].field(field).getValue().toString()); + } + return new RankFeatureShardResult(rankFeatureDocs); + } catch (Exception ex) { + throw ex; + } + } + }; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return new RankFeaturePhaseRankCoordinatorContext(size, from, rankWindowSize()) { + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener) { + float[] scores = new float[featureDocs.length]; + for (int i = 0; i < featureDocs.length; i++) { + scores[i] = Float.parseFloat(featureDocs[i].featureData); + } + scoreListener.onResponse(scores); + } + }; + } + + @Override + protected boolean doEquals(RankBuilder other) { + return other instanceof FieldBasedRankBuilder && Objects.equals(field, ((FieldBasedRankBuilder) other).field); + } + + @Override + protected int doHashCode() { + return Objects.hash(field); + } + + @Override + public String getWriteableName() { + return "field-based-rank"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.RANK_FEATURE_PHASE_ADDED; + } + } + + public static class ThrowingRankBuilder extends FieldBasedRankBuilder { + + public enum ThrowingRankBuilderType { + THROWING_QUERY_PHASE_SHARD_CONTEXT, + THROWING_QUERY_PHASE_COORDINATOR_CONTEXT, + THROWING_RANK_FEATURE_PHASE_SHARD_CONTEXT, + THROWING_RANK_FEATURE_PHASE_COORDINATOR_CONTEXT; + } + + protected final ThrowingRankBuilderType throwingRankBuilderType; + + public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final ParseField THROWING_TYPE_FIELD = new ParseField("throwing-type"); + static final ConstructingObjectParser<ThrowingRankBuilder, Void> PARSER = new ConstructingObjectParser<>("throwing-rank", args -> {
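+ // parses the same arguments as FieldBasedRankBuilder, plus a throwing-type value that selects which rank context will throw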
+ int rankWindowSize = args[0] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[0]; + String field = (String) args[1]; + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("Field cannot be null or empty"); + } + String throwingType = (String) args[2]; + return new ThrowingRankBuilder(rankWindowSize, field, throwingType); + }); + + static { + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareString(constructorArg(), THROWING_TYPE_FIELD); + } + + public static FieldBasedRankBuilder fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public ThrowingRankBuilder(final int rankWindowSize, final String field, final String throwingType) { + super(rankWindowSize, field); + this.throwingRankBuilderType = ThrowingRankBuilderType.valueOf(throwingType); + } + + public ThrowingRankBuilder(StreamInput in) throws IOException { + super(in); + this.throwingRankBuilderType = in.readEnum(ThrowingRankBuilderType.class); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + super.doWriteTo(out); + out.writeEnum(throwingRankBuilderType); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + super.doXContent(builder, params); + builder.field(THROWING_TYPE_FIELD.getPreferredName(), throwingRankBuilderType); + } + + @Override + public String getWriteableName() { + return "throwing-rank"; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List<Query> queries, int from) { + if (this.throwingRankBuilderType == ThrowingRankBuilderType.THROWING_QUERY_PHASE_SHARD_CONTEXT) + return new QueryPhaseRankShardContext(queries, rankWindowSize()) { + @Override + public RankShardResult combineQueryPhaseResults(List<TopDocs> rankResults) { + throw new UnsupportedOperationException("qps - simulated failure"); + } + }; + else { + return super.buildQueryPhaseShardContext(queries, from); + } + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + if (this.throwingRankBuilderType == ThrowingRankBuilderType.THROWING_QUERY_PHASE_COORDINATOR_CONTEXT) + return new QueryPhaseRankCoordinatorContext(rankWindowSize()) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List<QuerySearchResult> querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + throw new UnsupportedOperationException("qpc - simulated failure"); + } + }; + else { + return super.buildQueryPhaseCoordinatorContext(size, from); + } + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + if (this.throwingRankBuilderType == ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_SHARD_CONTEXT) + return new RankFeaturePhaseRankShardContext(field) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + throw new UnsupportedOperationException("rfs - simulated failure"); + } + }; + else { + return super.buildRankFeaturePhaseShardContext(); + } + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + if (this.throwingRankBuilderType == ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_COORDINATOR_CONTEXT) + return new RankFeaturePhaseRankCoordinatorContext(size, from, rankWindowSize()) { + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener) { + throw new UnsupportedOperationException("rfc - simulated failure"); + } + }; + else { + return super.buildRankFeaturePhaseCoordinatorContext(size, from); + } + } + }
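+ // the plugin below registers both rank builders with the transport (NamedWriteable) and x-content (NamedXContent) registries, plus the rank_feature_shard result they share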
+ + public static class FieldBasedRerankerPlugin extends Plugin implements SearchPlugin { + + private static final String FIELD_BASED_RANK_BUILDER_NAME = "field-based-rank"; + private static final String THROWING_RANK_BUILDER_NAME = "throwing-rank"; + + @Override + public List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry(RankBuilder.class, FIELD_BASED_RANK_BUILDER_NAME, FieldBasedRankBuilder::new), + new NamedWriteableRegistry.Entry(RankBuilder.class, THROWING_RANK_BUILDER_NAME, ThrowingRankBuilder::new), + new NamedWriteableRegistry.Entry(RankShardResult.class, "rank_feature_shard", RankFeatureShardResult::new) + ); + } + + @Override + public List<NamedXContentRegistry.Entry> getNamedXContent() { + return List.of( + new NamedXContentRegistry.Entry( + RankBuilder.class, + new ParseField(FIELD_BASED_RANK_BUILDER_NAME), + FieldBasedRankBuilder::fromXContent + ), + new NamedXContentRegistry.Entry( + RankBuilder.class, + new ParseField(THROWING_RANK_BUILDER_NAME), + ThrowingRankBuilder::fromXContent + ) + ); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 543f45b58279e..dd8cf5e527055 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -490,6 +490,46 @@ public void testTooLongRegexInRegexpQuery() throws Exception { ); } + public void testTooLongPrefixInPrefixQuery() throws Exception { + createIndex("idx"); + + // Ensure the field `num` exists in the mapping + client().admin() + .indices() + .preparePutMapping("idx") + .setSource("{\"properties\":{\"num\":{\"type\":\"keyword\"}}}", XContentType.JSON) + .get(); + + // Index a simple document to ensure the field `num` is in the index + indexRandom(true, prepareIndex("idx").setSource("{\"num\":\"test\"}", XContentType.JSON)); + + int defaultMaxRegexLength = IndexSettings.MAX_REGEX_LENGTH_SETTING.get(Settings.EMPTY); + StringBuilder prefix = new StringBuilder(defaultMaxRegexLength); + + while (prefix.length() <= defaultMaxRegexLength) { + prefix.append("a"); + } + + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("idx").setQuery(QueryBuilders.prefixQuery("num", prefix.toString())).get() + );
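+ // prefix queries are capped by the same index.max_regex_length setting that guards regexp queries, so the oversized prefix must be rejected with the message below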
+ assertThat( + e.getRootCause().getMessage(), + containsString( + "The length of prefix [" + + prefix.length() + + "] used in the Prefix Query request has exceeded " + + "the allowed maximum of [" + + defaultMaxRegexLength + + "]. " + + "This maximum can be set by changing the [" + + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + + "] index level setting." + ) + ); + } + public void testStrictlyCountRequest() throws Exception { createIndex("test_count_1"); indexRandom( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 005604b92a723..6e19cf60cf5b9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -579,8 +579,8 @@ public void testUnrestorableFilesDuringRestore() throws Exception { .build(); Consumer<UnassignedInfo> checkUnassignedInfo = unassignedInfo -> { - assertThat(unassignedInfo.getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); - assertThat(unassignedInfo.getNumFailedAllocations(), anyOf(equalTo(maxRetries), equalTo(1))); + assertThat(unassignedInfo.reason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + assertThat(unassignedInfo.failedAllocations(), anyOf(equalTo(maxRetries), equalTo(1))); }; unrestorableUseCase(indexName, createIndexSettings, repositorySettings, Settings.EMPTY, checkUnassignedInfo, () -> {}); @@ -605,7 +605,7 @@ public void testUnrestorableIndexDuringRestore() throws Exception { Settings.EMPTY, Settings.EMPTY, restoreIndexSettings, - unassignedInfo -> assertThat(unassignedInfo.getReason(), equalTo(UnassignedInfo.Reason.NEW_INDEX_RESTORED)), + unassignedInfo -> assertThat(unassignedInfo.reason(), equalTo(UnassignedInfo.Reason.NEW_INDEX_RESTORED)), fixupAction ); } @@ -670,7 +670,7 @@ private void unrestorableUseCase( if (shard.primary()) { assertThat(shard.state(), equalTo(ShardRoutingState.UNASSIGNED)); assertThat(shard.recoverySource().getType(), equalTo(RecoverySource.Type.SNAPSHOT)); - assertThat(shard.unassignedInfo().getLastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.DECIDERS_NO)); + assertThat(shard.unassignedInfo().lastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.DECIDERS_NO)); checkUnassignedInfo.accept(shard.unassignedInfo()); } } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index d8682500c49d6..2f08129b4080d 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -362,6 +362,7 @@ exports org.elasticsearch.search.query; exports org.elasticsearch.search.rank; exports org.elasticsearch.search.rank.context; + exports org.elasticsearch.search.rank.feature; exports org.elasticsearch.search.rescore; exports org.elasticsearch.search.retriever; exports org.elasticsearch.search.runtime; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 07579161a85c8..e6b98971ff8cb 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -183,6 +183,10 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_ENHANCE_DELETE_ENDPOINT = def(8_674_00_0); public static final TransportVersion ML_INFERENCE_GOOGLE_AI_STUDIO_EMBEDDINGS_ADDED = def(8_675_00_0); public static final TransportVersion ADD_MISTRAL_EMBEDDINGS_INFERENCE = def(8_676_00_0); + public static final TransportVersion ML_CHUNK_INFERENCE_OPTION = def(8_677_00_0); + public static final TransportVersion RANK_FEATURE_PHASE_ADDED = def(8_678_00_0);
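+ // note: new transport version ids must be unique and strictly increasing, per the "STOP! READ THIS FIRST!" warning further down this file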
+ public static final TransportVersion RANK_DOC_IN_SHARD_FETCH_REQUEST = def(8_679_00_0); + public static final TransportVersion SECURITY_SETTINGS_REQUEST_TIMEOUTS = def(8_680_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index dc161766b7954..06e4a1dd5368d 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -174,8 +174,8 @@ public class Version implements VersionId<Version>, ToXContentFragment { public static final Version V_8_13_2 = new Version(8_13_02_99); public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_13_4 = new Version(8_13_04_99); - public static final Version V_8_13_5 = new Version(8_13_05_99); public static final Version V_8_14_0 = new Version(8_14_00_99); + public static final Version V_8_14_1 = new Version(8_14_01_99); public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version CURRENT = V_8_15_0; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java index d22bae9c5a4b1..1e5f9d5d613d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java @@ -226,16 +226,16 @@ private Iterator getShardAllocationDecisionChunked(ToXCont private static XContentBuilder unassignedInfoToXContent(UnassignedInfo unassignedInfo, XContentBuilder builder) throws IOException { builder.startObject("unassigned_info"); - builder.field("reason", unassignedInfo.getReason()); - builder.field("at", UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(unassignedInfo.getUnassignedTimeInMillis()))); - if (unassignedInfo.getNumFailedAllocations() > 0) { - builder.field("failed_allocation_attempts", unassignedInfo.getNumFailedAllocations()); + builder.field("reason", unassignedInfo.reason()); + builder.field("at", UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(unassignedInfo.unassignedTimeMillis()))); + if (unassignedInfo.failedAllocations() > 0) { + builder.field("failed_allocation_attempts", unassignedInfo.failedAllocations()); } - String details = unassignedInfo.getDetails(); + String details = unassignedInfo.details(); if (details != null) { builder.field("details", details); } - builder.field("last_allocation_status", AllocationDecision.fromAllocationStatus(unassignedInfo.getLastAllocationStatus())); + builder.field("last_allocation_status", AllocationDecision.fromAllocationStatus(unassignedInfo.lastAllocationStatus())); builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a4c6b78f6a95e..aca7c8752ef4d 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -223,6 +223,8 @@ public static void performOnPrimary( final long startBulkTime = System.nanoTime(); + private final ActionListener<Void> onMappingUpdateDone = ActionListener.wrap(v -> executor.execute(this), this::onRejection); +
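+ // the listener above is built once so the loop in doRun can reuse it for every operation instead of allocating a new wrapper each iteration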
+ @Override + protected void doRun() throws Exception { + while (context.hasMoreOperationsToExecute()) { @@ -232,8 +234,7 @@ protected void doRun() throws Exception { nowInMillisSupplier, mappingUpdater, waitForMappingUpdate, - - ActionListener.wrap(v -> executor.execute(this), this::onRejection), + onMappingUpdateDone, documentParsingProvider ) == false) { // We are waiting for a mapping update on another thread, that will invoke this action again once its done diff --git a/server/src/main/java/org/elasticsearch/action/search/CCSSingleCoordinatorSearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/CCSSingleCoordinatorSearchProgressListener.java index 3b594c94db9a7..0504d0cde8986 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CCSSingleCoordinatorSearchProgressListener.java +++ b/server/src/main/java/org/elasticsearch/action/search/CCSSingleCoordinatorSearchProgressListener.java @@ -260,6 +260,24 @@ public void onFinalReduce(List<SearchShard> shards, TotalHits totalHits, Interna } } + /** + * Executed when a shard returns a rank feature result. + * + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards()}. + */ + @Override + public void onRankFeatureResult(int shardIndex) {} + + /** + * Executed when a shard reports a rank feature failure. + * + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards()}. + * @param shardTarget The last shard target that threw an exception. + * @param exc The cause of the failure. + */ + @Override + public void onRankFeatureFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {} + /** * Executed when a shard returns a fetch result. * diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index f804ab31faf8e..c81f5a20bc2d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -17,10 +17,13 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.transport.Transport; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rank.RankDocShardInfo; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.function.BiFunction; /** @@ -29,7 +32,7 @@ */ final class FetchSearchPhase extends SearchPhase { private final ArraySearchPhaseResults<FetchSearchResult> fetchResults; - private final AtomicArray<SearchPhaseResult> queryResults; + private final AtomicArray<SearchPhaseResult> searchPhaseShardResults; private final BiFunction<SearchResponseSections, AtomicArray<SearchPhaseResult>, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final Logger logger; @@ -74,7 +77,7 @@ final class FetchSearchPhase extends SearchPhase { } this.fetchResults = new ArraySearchPhaseResults<>(resultConsumer.getNumShards()); context.addReleasable(fetchResults); - this.queryResults = resultConsumer.getAtomicArray(); + this.searchPhaseShardResults = resultConsumer.getAtomicArray(); this.aggregatedDfs = aggregatedDfs; this.nextPhaseFactory = nextPhaseFactory; this.context = context; @@ -103,21 +106,26 @@ private void innerRun() { final int numShards = context.getNumShards(); // Usually when there is a single shard, we force the search type QUERY_THEN_FETCH.
But when there's kNN, we might // still use DFS_QUERY_THEN_FETCH, which does not perform the "query and fetch" optimization during the query phase. - final boolean queryAndFetchOptimization = queryResults.length() == 1 + final boolean queryAndFetchOptimization = searchPhaseShardResults.length() == 1 && context.getRequest().hasKnnSearch() == false - && reducedQueryPhase.rankCoordinatorContext() == null; + && reducedQueryPhase.queryPhaseRankCoordinatorContext() == null; if (queryAndFetchOptimization) { assert assertConsistentWithQueryAndFetchOptimization(); // query AND fetch optimization - moveToNextPhase(queryResults); + moveToNextPhase(searchPhaseShardResults); } else { ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs().scoreDocs(); // no docs to fetch -- sidestep everything and return if (scoreDocs.length == 0) { // we have to release contexts here to free up resources - queryResults.asList().stream().map(SearchPhaseResult::queryResult).forEach(this::releaseIrrelevantSearchContext); + searchPhaseShardResults.asList() + .forEach(searchPhaseShardResult -> releaseIrrelevantSearchContext(searchPhaseShardResult, context)); moveToNextPhase(fetchResults.getAtomicArray()); } else { + final boolean shouldExplainRank = shouldExplainRankScores(context.getRequest()); + final List> rankDocsPerShard = false == shouldExplainRank + ? null + : splitRankDocsPerShard(scoreDocs, numShards); final ScoreDoc[] lastEmittedDocPerShard = context.getRequest().scroll() != null ? SearchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, numShards) : null; @@ -130,51 +138,79 @@ private void innerRun() { ); for (int i = 0; i < docIdsToLoad.length; i++) { List entry = docIdsToLoad[i]; - SearchPhaseResult queryResult = queryResults.get(i); + RankDocShardInfo rankDocs = rankDocsPerShard == null || rankDocsPerShard.get(i).isEmpty() + ? null + : new RankDocShardInfo(rankDocsPerShard.get(i)); + SearchPhaseResult shardPhaseResult = searchPhaseShardResults.get(i); if (entry == null) { // no results for this shard ID - if (queryResult != null) { + if (shardPhaseResult != null) { // if we got some hits from this shard we have to release the context there // we do this as we go since it will free up resources and passing on the request on the // transport layer is cheap. - releaseIrrelevantSearchContext(queryResult.queryResult()); + releaseIrrelevantSearchContext(shardPhaseResult, context); progressListener.notifyFetchResult(i); } // in any case we count down this result since we don't talk to this shard anymore counter.countDown(); } else { - executeFetch(queryResult, counter, entry, (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[i] : null); + executeFetch( + shardPhaseResult, + counter, + entry, + rankDocs, + (lastEmittedDocPerShard != null) ? 
lastEmittedDocPerShard[i] : null + ); } } } } } + private List> splitRankDocsPerShard(ScoreDoc[] scoreDocs, int numShards) { + List> rankDocsPerShard = new ArrayList<>(numShards); + for (int i = 0; i < numShards; i++) { + rankDocsPerShard.add(new HashMap<>()); + } + for (ScoreDoc scoreDoc : scoreDocs) { + assert scoreDoc instanceof RankDoc : "ScoreDoc is not a RankDoc"; + assert scoreDoc.shardIndex >= 0 && scoreDoc.shardIndex <= numShards; + RankDoc rankDoc = (RankDoc) scoreDoc; + Map shardScoreDocs = rankDocsPerShard.get(rankDoc.shardIndex); + shardScoreDocs.put(rankDoc.doc, rankDoc); + } + return rankDocsPerShard; + } + private boolean assertConsistentWithQueryAndFetchOptimization() { - var phaseResults = queryResults.asList(); + var phaseResults = searchPhaseShardResults.asList(); assert phaseResults.isEmpty() || phaseResults.get(0).fetchResult() != null : "phaseResults empty [" + phaseResults.isEmpty() + "], single result: " + phaseResults.get(0).fetchResult(); return true; } private void executeFetch( - SearchPhaseResult queryResult, + SearchPhaseResult shardPhaseResult, final CountedCollector counter, final List entry, + final RankDocShardInfo rankDocs, ScoreDoc lastEmittedDocForShard ) { - final SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); - final int shardIndex = queryResult.getShardIndex(); - final ShardSearchContextId contextId = queryResult.queryResult().getContextId(); + final SearchShardTarget shardTarget = shardPhaseResult.getSearchShardTarget(); + final int shardIndex = shardPhaseResult.getShardIndex(); + final ShardSearchContextId contextId = shardPhaseResult.queryResult() != null + ? shardPhaseResult.queryResult().getContextId() + : shardPhaseResult.rankFeatureResult().getContextId(); context.getSearchTransport() .sendExecuteFetch( context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()), new ShardFetchSearchRequest( - context.getOriginalIndices(queryResult.getShardIndex()), + context.getOriginalIndices(shardPhaseResult.getShardIndex()), contextId, - queryResult.getShardSearchRequest(), + shardPhaseResult.getShardSearchRequest(), entry, + rankDocs, lastEmittedDocForShard, - queryResult.getRescoreDocIds(), + shardPhaseResult.getRescoreDocIds(), aggregatedDfs ), context.getTask(), @@ -199,40 +235,25 @@ public void onFailure(Exception e) { // the search context might not be cleared on the node where the fetch was executed for example // because the action was rejected by the thread pool. in this case we need to send a dedicated // request to clear the search context. - releaseIrrelevantSearchContext(queryResult.queryResult()); + releaseIrrelevantSearchContext(shardPhaseResult, context); } } } ); } - /** - * Releases shard targets that are not used in the docsIdsToLoad. 
- */ - private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { - // we only release search context that we did not fetch from, if we are not scrolling - // or using a PIT and if it has at least one hit that didn't make it to the global topDocs - if (queryResult.hasSearchContext() - && context.getRequest().scroll() == null - && (context.isPartOfPointInTime(queryResult.getContextId()) == false)) { - try { - SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); - context.sendReleaseSearchContext( - queryResult.getContextId(), - connection, - context.getOriginalIndices(queryResult.getShardIndex()) - ); - } catch (Exception e) { - logger.trace("failed to release context", e); - } - } - } - private void moveToNextPhase(AtomicArray fetchResultsArr) { var resp = SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr); context.addReleasable(resp::decRef); fetchResults.close(); - context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); + context.executeNextPhase(this, nextPhaseFactory.apply(resp, searchPhaseShardResults)); } + + private boolean shouldExplainRankScores(SearchRequest request) { + return request.source() != null + && request.source().explain() != null + && request.source().explain() + && request.source().rankBuilder() != null; + } + } diff --git a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java index 767597625edc6..291982dd9bdd3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java @@ -7,23 +7,39 @@ */ package org.elasticsearch.action.search; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.dfs.AggregatedDfs; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; + +import java.util.List; /** * This search phase is responsible for executing any re-ranking needed for the given search request, iff that is applicable. - * It starts by retrieving {code num_shards * window_size} results from the query phase and reduces them to a global list of + * It starts by retrieving {@code num_shards * window_size} results from the query phase and reduces them to a global list of * the top {@code window_size} results. It then reaches out to the shards to extract the needed feature data, * and finally passes all this information to the appropriate {@code RankFeatureRankCoordinatorContext} which is responsible for reranking * the results. 
If no rank query is specified, it proceeds directly to the next phase (FetchSearchPhase) by first reducing the results. */ -public final class RankFeaturePhase extends SearchPhase { +public class RankFeaturePhase extends SearchPhase { + private static final Logger logger = LogManager.getLogger(RankFeaturePhase.class); private final SearchPhaseContext context; - private final SearchPhaseResults queryPhaseResults; - + final SearchPhaseResults queryPhaseResults; + final SearchPhaseResults rankPhaseResults; private final AggregatedDfs aggregatedDfs; + private final SearchProgressListener progressListener; RankFeaturePhase(SearchPhaseResults queryPhaseResults, AggregatedDfs aggregatedDfs, SearchPhaseContext context) { super("rank-feature"); @@ -38,6 +54,9 @@ public final class RankFeaturePhase extends SearchPhase { this.context = context; this.queryPhaseResults = queryPhaseResults; this.aggregatedDfs = aggregatedDfs; + this.rankPhaseResults = new ArraySearchPhaseResults<>(context.getNumShards()); + context.addReleasable(rankPhaseResults); + this.progressListener = context.getTask().getProgressListener(); } @Override @@ -59,16 +78,154 @@ public void onFailure(Exception e) { }); } - private void innerRun() throws Exception { - // other than running reduce, this is currently close to a no-op + void innerRun() throws Exception { + // if the RankBuilder specifies a QueryPhaseCoordinatorContext, it will be called as part of the reduce call + // to operate on the first `window_size * num_shards` results and merge them appropriately. SearchPhaseController.ReducedQueryPhase reducedQueryPhase = queryPhaseResults.reduce(); - moveToNextPhase(queryPhaseResults, reducedQueryPhase); + RankFeaturePhaseRankCoordinatorContext rankFeaturePhaseRankCoordinatorContext = coordinatorContext(context.getRequest().source()); + if (rankFeaturePhaseRankCoordinatorContext != null) { + ScoreDoc[] queryScoreDocs = reducedQueryPhase.sortedTopDocs().scoreDocs(); // rank_window_size + final List[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(context.getNumShards(), queryScoreDocs); + final CountedCollector rankRequestCounter = new CountedCollector<>( + rankPhaseResults, + context.getNumShards(), + () -> onPhaseDone(rankFeaturePhaseRankCoordinatorContext, reducedQueryPhase), + context + ); + + // we send out a request to each shard in order to fetch the needed feature info + for (int i = 0; i < docIdsToLoad.length; i++) { + List entry = docIdsToLoad[i]; + SearchPhaseResult queryResult = queryPhaseResults.getAtomicArray().get(i); + if (entry == null || entry.isEmpty()) { + if (queryResult != null) { + releaseIrrelevantSearchContext(queryResult, context); + progressListener.notifyRankFeatureResult(i); + } + rankRequestCounter.countDown(); + } else { + executeRankFeatureShardPhase(queryResult, rankRequestCounter, entry); + } + } + } else { + moveToNextPhase(queryPhaseResults, reducedQueryPhase); + } + } + + private RankFeaturePhaseRankCoordinatorContext coordinatorContext(SearchSourceBuilder source) { + return source == null || source.rankBuilder() == null + ? 
null + : context.getRequest() + .source() + .rankBuilder() + .buildRankFeaturePhaseCoordinatorContext(context.getRequest().source().size(), context.getRequest().source().from()); } - private void moveToNextPhase( - SearchPhaseResults phaseResults, + private void executeRankFeatureShardPhase( + SearchPhaseResult queryResult, + final CountedCollector rankRequestCounter, + final List entry + ) { + final SearchShardTarget shardTarget = queryResult.queryResult().getSearchShardTarget(); + final ShardSearchContextId contextId = queryResult.queryResult().getContextId(); + final int shardIndex = queryResult.getShardIndex(); + context.getSearchTransport() + .sendExecuteRankFeature( + context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()), + new RankFeatureShardRequest( + context.getOriginalIndices(queryResult.getShardIndex()), + queryResult.getContextId(), + queryResult.getShardSearchRequest(), + entry + ), + context.getTask(), + new SearchActionListener<>(shardTarget, shardIndex) { + @Override + protected void innerOnResponse(RankFeatureResult response) { + try { + progressListener.notifyRankFeatureResult(shardIndex); + rankRequestCounter.onResult(response); + } catch (Exception e) { + context.onPhaseFailure(RankFeaturePhase.this, "", e); + } + } + + @Override + public void onFailure(Exception e) { + try { + logger.debug(() -> "[" + contextId + "] Failed to execute rank phase", e); + progressListener.notifyRankFeatureFailure(shardIndex, shardTarget, e); + rankRequestCounter.onFailure(shardIndex, shardTarget, e); + } finally { + releaseIrrelevantSearchContext(queryResult, context); + } + } + } + ); + } + + private void onPhaseDone( + RankFeaturePhaseRankCoordinatorContext rankFeaturePhaseRankCoordinatorContext, SearchPhaseController.ReducedQueryPhase reducedQueryPhase ) { + assert rankFeaturePhaseRankCoordinatorContext != null; + ThreadedActionListener rankResultListener = new ThreadedActionListener<>(context, new ActionListener<>() { + @Override + public void onResponse(RankFeatureDoc[] docsWithUpdatedScores) { + RankFeatureDoc[] topResults = rankFeaturePhaseRankCoordinatorContext.rankAndPaginate(docsWithUpdatedScores); + SearchPhaseController.ReducedQueryPhase reducedRankFeaturePhase = newReducedQueryPhaseResults( + reducedQueryPhase, + topResults + ); + moveToNextPhase(rankPhaseResults, reducedRankFeaturePhase); + } + + @Override + public void onFailure(Exception e) { + context.onPhaseFailure(RankFeaturePhase.this, "Computing updated ranks for results failed", e); + } + }); + rankFeaturePhaseRankCoordinatorContext.rankGlobalResults( + rankPhaseResults.getAtomicArray().asList().stream().map(SearchPhaseResult::rankFeatureResult).toList(), + rankResultListener + ); + } + + private SearchPhaseController.ReducedQueryPhase newReducedQueryPhaseResults( + SearchPhaseController.ReducedQueryPhase reducedQueryPhase, + ScoreDoc[] scoreDocs + ) { + + return new SearchPhaseController.ReducedQueryPhase( + reducedQueryPhase.totalHits(), + reducedQueryPhase.fetchHits(), + maxScore(scoreDocs), + reducedQueryPhase.timedOut(), + reducedQueryPhase.terminatedEarly(), + reducedQueryPhase.suggest(), + reducedQueryPhase.aggregations(), + reducedQueryPhase.profileBuilder(), + new SearchPhaseController.SortedTopDocs(scoreDocs, false, null, null, null, 0), + reducedQueryPhase.sortValueFormats(), + reducedQueryPhase.queryPhaseRankCoordinatorContext(), + reducedQueryPhase.numReducePhases(), + reducedQueryPhase.size(), + reducedQueryPhase.from(), + reducedQueryPhase.isEmptyResult() + ); + } + + private 
float maxScore(ScoreDoc[] scoreDocs) { + float maxScore = Float.NaN; + for (ScoreDoc scoreDoc : scoreDocs) { + if (Float.isNaN(maxScore) || scoreDoc.score > maxScore) { + maxScore = scoreDoc.score; + } + } + return maxScore; + } + + void moveToNextPhase(SearchPhaseResults phaseResults, SearchPhaseController.ReducedQueryPhase reducedQueryPhase) { context.executeNextPhase(this, new FetchSearchPhase(phaseResults, aggregatedDfs, context, reducedQueryPhase)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 9d3eadcc42bf9..5ed449667fe57 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -9,6 +9,9 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.core.CheckedRunnable; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.transport.Transport; import java.io.IOException; import java.io.UncheckedIOException; @@ -62,4 +65,35 @@ static void doCheckNoMissingShards(String phaseName, SearchRequest request, Grou } } } + + /** + * Releases shard targets that are not used in the docsIdsToLoad. + */ + protected void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, SearchPhaseContext context) { + // we only release search context that we did not fetch from, if we are not scrolling + // or using a PIT and if it has at least one hit that didn't make it to the global topDocs + if (searchPhaseResult == null) { + return; + } + // phaseResult.getContextId() is the same for query & rank feature results + SearchPhaseResult phaseResult = searchPhaseResult.queryResult() != null + ? searchPhaseResult.queryResult() + : searchPhaseResult.rankFeatureResult(); + if (phaseResult != null + && phaseResult.hasSearchContext() + && context.getRequest().scroll() == null + && (context.isPartOfPointInTime(phaseResult.getContextId()) == false)) { + try { + SearchShardTarget shardTarget = phaseResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); + context.sendReleaseSearchContext( + phaseResult.getContextId(), + connection, + context.getOriginalIndices(phaseResult.getShardIndex()) + ); + } catch (Exception e) { + context.getLogger().trace("failed to release context", e); + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 1b894dfe3d8bd..1d3859b9038fe 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -456,7 +456,7 @@ private static SearchHits getHits( : "not enough hits fetched. 
index [" + index + "] length: " + fetchResult.hits().getHits().length; SearchHit searchHit = fetchResult.hits().getHits()[index]; searchHit.shard(fetchResult.getSearchShardTarget()); - if (reducedQueryPhase.rankCoordinatorContext != null) { + if (reducedQueryPhase.queryPhaseRankCoordinatorContext != null) { assert shardDoc instanceof RankDoc; searchHit.setRank(((RankDoc) shardDoc).rank); searchHit.score(shardDoc.score); @@ -747,7 +747,7 @@ public record ReducedQueryPhase( // sort value formats used to sort / format the result DocValueFormat[] sortValueFormats, // the rank context if ranking is used - QueryPhaseRankCoordinatorContext rankCoordinatorContext, + QueryPhaseRankCoordinatorContext queryPhaseRankCoordinatorContext, // the number of reduces phases int numReducePhases, // the size of the top hits to return diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java index f5d280a01257c..3b5e03cb5ac4a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java @@ -88,6 +88,22 @@ protected void onPartialReduce(List<SearchShard> shards, TotalHits totalHits, In */ protected void onFinalReduce(List<SearchShard> shards, TotalHits totalHits, InternalAggregations aggs, int reducePhase) {} + /** + * Executed when a shard returns a rank feature result. + * + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards()}. + */ + protected void onRankFeatureResult(int shardIndex) {} + + /** + * Executed when a shard reports a rank feature failure. + * + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards()}. + * @param shardTarget The last shard target that threw an exception. + * @param exc The cause of the failure. + */ + protected void onRankFeatureFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {} + /** * Executed when a shard returns a fetch result.
* @@ -160,6 +176,22 @@ protected final void notifyFinalReduce(List shards, TotalHits total } } + final void notifyRankFeatureResult(int shardIndex) { + try { + onRankFeatureResult(shardIndex); + } catch (Exception e) { + logger.warn(() -> "[" + shards.get(shardIndex) + "] Failed to execute progress listener on rank-feature result", e); + } + } + + final void notifyRankFeatureFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { + try { + onRankFeatureFailure(shardIndex, shardTarget, exc); + } catch (Exception e) { + logger.warn(() -> "[" + shards.get(shardIndex) + "] Failed to execute progress listener on rank-feature failure", e); + } + } + final void notifyFetchResult(int shardIndex) { try { onFetchResult(shardIndex); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 4e3fdbc9633b9..e1fe6eac7e9c1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -407,7 +407,7 @@ public ActionRequestValidationException validate() { ); } int queryCount = source.subSearches().size() + source.knnSearch().size(); - if (queryCount < 2) { + if (source.rankBuilder().isCompoundBuilder() && queryCount < 2) { validationException = addValidationError( "[rank] requires a minimum of [2] result sets using a combination of sub searches and/or knn searches", validationException @@ -434,12 +434,6 @@ public ActionRequestValidationException validate() { if (source.pointInTimeBuilder() != null) { validationException = addValidationError("[rank] cannot be used with [point in time]", validationException); } - if (source.profile()) { - validationException = addValidationError("[rank] requires [profile] is [false]", validationException); - } - if (source.explain() != null && source.explain()) { - validationException = addValidationError("[rank] requires [explain] is [false]", validationException); - } } if (source.rescores() != null) { for (@SuppressWarnings("rawtypes") diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java index 93b8e22d0d7cd..9f8896f169350 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java @@ -19,6 +19,7 @@ public class SearchTransportAPMMetrics { public static final String DFS_ACTION_METRIC = "dfs_query_then_fetch/shard_dfs_phase"; public static final String QUERY_ID_ACTION_METRIC = "dfs_query_then_fetch/shard_query_phase"; public static final String QUERY_ACTION_METRIC = "query_then_fetch/shard_query_phase"; + public static final String RANK_SHARD_FEATURE_ACTION_METRIC = "rank/shard_feature_phase"; public static final String FREE_CONTEXT_ACTION_METRIC = "shard_release_context"; public static final String FETCH_ID_ACTION_METRIC = "shard_fetch_phase"; public static final String QUERY_SCROLL_ACTION_METRIC = "scroll/shard_query_phase"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 66c395cf51d96..873c644725aba 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -39,6 +39,8 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; @@ -70,6 +72,7 @@ import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_FETCH_SCROLL_ACTION_METRIC; import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_ID_ACTION_METRIC; import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_SCROLL_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.RANK_SHARD_FEATURE_ACTION_METRIC; /** * An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through @@ -96,6 +99,8 @@ public class SearchTransportService { public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]"; public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]"; + public static final String RANK_FEATURE_SHARD_ACTION_NAME = "indices:data/read/search[phase/rank/feature]"; + /** * The Can-Match phase. It is executed to pre-filter shards that a search request hits. It rewrites the query on * the shard and checks whether the result of the rewrite matches no documents, in which case the shard can be @@ -250,6 +255,21 @@ public void sendExecuteScrollQuery( ); } + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + transportService.sendChildRequest( + connection, + RANK_FEATURE_SHARD_ACTION_NAME, + request, + task, + new ConnectionCountingHandler<>(listener, RankFeatureResult::new, connection) + ); + } + public void sendExecuteScrollFetch( Transport.Connection connection, final InternalScrollSearchRequest request, @@ -419,7 +439,7 @@ public static void registerRequestHandler( }; transportService.registerRequestHandler( FREE_CONTEXT_SCROLL_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), ScrollFreeContextRequest::new, instrumentedHandler(FREE_CONTEXT_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); @@ -427,7 +447,7 @@ public static void registerRequestHandler( transportService.registerRequestHandler( FREE_CONTEXT_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), SearchFreeContextRequest::new, instrumentedHandler(FREE_CONTEXT_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); @@ -435,7 +455,7 @@ public static void registerRequestHandler( transportService.registerRequestHandler( CLEAR_SCROLL_CONTEXTS_ACTION_NAME, - EsExecutors.DIRECT_EXECUTOR_SERVICE, + transportService.getThreadPool().generic(), TransportRequest.Empty::new, instrumentedHandler(CLEAR_SCROLL_CONTEXTS_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { searchService.freeAllScrollContexts(); @@ -539,6 +559,16 @@ public static void registerRequestHandler( ); TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, true, 
ScrollQueryFetchSearchResult::new); + final TransportRequestHandler rankShardFeatureRequest = (request, channel, task) -> searchService + .executeRankFeaturePhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); + transportService.registerRequestHandler( + RANK_FEATURE_SHARD_ACTION_NAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + RankFeatureShardRequest::new, + instrumentedHandler(RANK_SHARD_FEATURE_ACTION_METRIC, transportService, searchTransportMetrics, rankShardFeatureRequest) + ); + TransportActionProxy.registerProxyAction(transportService, RANK_FEATURE_SHARD_ACTION_NAME, true, RankFeatureResult::new); + final TransportRequestHandler shardFetchRequestHandler = (request, channel, task) -> searchService .executeFetchPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); transportService.registerRequestHandler( diff --git a/server/src/main/java/org/elasticsearch/cluster/DiffableUtils.java b/server/src/main/java/org/elasticsearch/cluster/DiffableUtils.java index d2ebab48142d6..061da87b00f69 100644 --- a/server/src/main/java/org/elasticsearch/cluster/DiffableUtils.java +++ b/server/src/main/java/org/elasticsearch/cluster/DiffableUtils.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.util.Maps; import java.io.IOException; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -154,7 +154,9 @@ private static > MapDiff createDiff( inserts++; } else if (entry.getValue().equals(previousValue) == false) { if (valueSerializer.supportsDiffableValues()) { - diffs.add(new Maps.ImmutableEntry<>(entry.getKey(), valueSerializer.diff(entry.getValue(), previousValue))); + diffs.add( + new AbstractMap.SimpleImmutableEntry<>(entry.getKey(), valueSerializer.diff(entry.getValue(), previousValue)) + ); } else { upserts.add(entry); } @@ -308,14 +310,14 @@ private MapDiff( for (int i = 0; i < diffsCount; i++) { K key = keySerializer.readKey(in); Diff diff = valueSerializer.readDiff(in, key); - diffs.add(new Maps.ImmutableEntry<>(key, diff)); + diffs.add(new AbstractMap.SimpleImmutableEntry<>(key, diff)); } int upsertsCount = in.readVInt(); upserts = upsertsCount == 0 ? 
List.of() : new ArrayList<>(upsertsCount); for (int i = 0; i < upsertsCount; i++) { K key = keySerializer.readKey(in); T newValue = valueSerializer.read(in, key); - upserts.add(new Maps.ImmutableEntry<>(key, newValue)); + upserts.add(new AbstractMap.SimpleImmutableEntry<>(key, newValue)); } this.builderCtor = builderCtor; } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java index 58f37ec220669..81044e8e3ad51 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java @@ -20,6 +20,7 @@ class NodeToolCli extends MultiCommand { subcommands.put("detach-cluster", new DetachClusterCommand()); subcommands.put("override-version", new OverrideNodeVersionCommand()); subcommands.put("remove-settings", new RemoveSettingsCommand()); + subcommands.put("remove-index-settings", new RemoveIndexSettingsCommand()); subcommands.put("remove-customs", new RemoveCustomsCommand()); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommand.java b/server/src/main/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommand.java new file mode 100644 index 0000000000000..c6514f9cb4a0b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/RemoveIndexSettingsCommand.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.cluster.coordination; + +import joptsimple.OptionSet; +import joptsimple.OptionSpec; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.env.Environment; +import org.elasticsearch.gateway.PersistedClusterStateService; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; + +public class RemoveIndexSettingsCommand extends ElasticsearchNodeCommand { + + static final String SETTINGS_REMOVED_MSG = "Index settings were successfully removed from the cluster state"; + static final String CONFIRMATION_MSG = DELIMITER + + "\n" + + "You should only run this tool if you have incompatible index settings in the\n" + + "cluster state that prevent the cluster from forming.\n" + + "This tool can cause data loss and its use should be your last resort.\n" + + "\n" + + "Do you want to proceed?\n"; + + private final OptionSpec arguments; + + public RemoveIndexSettingsCommand() { + super("Removes index settings from the cluster state"); + arguments = parser.nonOptions("index setting names"); + } + + @Override + protected void processDataPaths(Terminal terminal, Path[] dataPaths, OptionSet options, Environment env) throws IOException, + UserException { + final List settingsToRemove = arguments.values(options); + if (settingsToRemove.isEmpty()) { + throw new UserException(ExitCodes.USAGE, "Must supply at least one index setting to remove"); + } + + final PersistedClusterStateService persistedClusterStateService = createPersistedClusterStateService(env.settings(), dataPaths); + + terminal.println(Terminal.Verbosity.VERBOSE, "Loading cluster state"); + final Tuple termAndClusterState = loadTermAndClusterState(persistedClusterStateService, env); + final ClusterState oldClusterState = termAndClusterState.v2(); + final Metadata.Builder newMetadataBuilder = Metadata.builder(oldClusterState.metadata()); + int changes = 0; + for (IndexMetadata indexMetadata : oldClusterState.metadata()) { + Settings oldSettings = indexMetadata.getSettings(); + Settings.Builder newSettings = Settings.builder().put(oldSettings); + boolean removed = false; + for (String settingToRemove : settingsToRemove) { + for (String settingKey : oldSettings.keySet()) { + if (Regex.simpleMatch(settingToRemove, settingKey)) { + terminal.println( + "Index setting [" + settingKey + "] will be removed from index [" + indexMetadata.getIndex() + "]" + ); + newSettings.remove(settingKey); + removed = true; + } + } + } + if (removed) { + newMetadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(newSettings)); + changes++; + } + } + if (changes == 0) { + throw new UserException(ExitCodes.USAGE, "No index settings matching " + settingsToRemove + " were found on this node"); + } + + final ClusterState newClusterState = ClusterState.builder(oldClusterState).metadata(newMetadataBuilder).build(); + terminal.println( + Terminal.Verbosity.VERBOSE, + "[old cluster state = " + oldClusterState + ", new cluster state = " + newClusterState + "]" + ); + + confirm(terminal, CONFIRMATION_MSG); + + try (PersistedClusterStateService.Writer writer = persistedClusterStateService.createWriter()) { 
writer.writeFullStateAndCommit(termAndClusterState.v1(), newClusterState); + } + + terminal.println(SETTINGS_REMOVED_MSG); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java index 785b0db5cc807..adb5a7caf2f45 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java @@ -167,8 +167,8 @@ public static ClusterHealthStatus getInactivePrimaryHealth(final ShardRouting sh assert shardRouting.recoverySource() != null : "cannot invoke on a shard that has no recovery source" + shardRouting; final UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); RecoverySource.Type recoveryType = shardRouting.recoverySource().getType(); - if (unassignedInfo.getLastAllocationStatus() != AllocationStatus.DECIDERS_NO - && unassignedInfo.getNumFailedAllocations() == 0 + if (unassignedInfo.lastAllocationStatus() != AllocationStatus.DECIDERS_NO + && unassignedInfo.failedAllocations() == 0 && (recoveryType == RecoverySource.Type.EMPTY_STORE || recoveryType == RecoverySource.Type.LOCAL_SHARDS || recoveryType == RecoverySource.Type.SNAPSHOT)) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index ae01b7c064749..bf1d9462ab89f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -636,6 +636,40 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki .build(); } + /** + * Replaces the specified failure store index with a new index and returns a new {@code DataStream} instance with + * the modified failure store indices. An {@code IllegalArgumentException} is thrown if the index to be replaced + * is not a failure store index for this data stream or if it is the {@code DataStream}'s failure store write index. + * + * @param existingFailureIndex the failure store index to be replaced + * @param newFailureIndex the new index that will be part of the {@code DataStream} + * @return new {@code DataStream} instance with failure store indices that contain the replacement index instead of the specified + * existing index. + */ + public DataStream replaceFailureStoreIndex(Index existingFailureIndex, Index newFailureIndex) { + List currentFailureIndices = new ArrayList<>(failureIndices.indices); + int failureIndexPosition = currentFailureIndices.indexOf(existingFailureIndex); + if (failureIndexPosition == -1) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "index [%s] is not part of data stream [%s] failure store", existingFailureIndex.getName(), name) + ); + } + if (failureIndices.indices.size() == (failureIndexPosition + 1)) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "cannot replace failure index [%s] of data stream [%s] because it is the failure store write index", + existingFailureIndex.getName(), + name + ) + ); + } + currentFailureIndices.set(failureIndexPosition, newFailureIndex); + return copy().setFailureIndices(this.failureIndices.copy().setIndices(currentFailureIndices).build()) + .setGeneration(generation + 1) + .build(); + } + /** + * Adds the specified index as a backing index and returns a new {@code DataStream} instance with the new combination + * of backing indices. 
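The failure store write index is, by convention, the last element of the failure indices list, which is why replaceFailureStoreIndex rejects a replacement at the last position (the failureIndices.indices.size() == failureIndexPosition + 1 check). A minimal standalone sketch of that guard, using plain strings and hypothetical names rather than the real DataStream internals:

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

// Illustrative only: mirrors the replace-with-guard pattern above, not the real DataStream API.
final class FailureStoreReplaceSketch {
    static List<String> replaceFailureIndex(String dataStream, List<String> failureIndices, String existing, String replacement) {
        List<String> copy = new ArrayList<>(failureIndices);
        int position = copy.indexOf(existing);
        if (position == -1) {
            throw new IllegalArgumentException(
                String.format(Locale.ROOT, "index [%s] is not part of data stream [%s] failure store", existing, dataStream)
            );
        }
        if (position == copy.size() - 1) { // the last element is the write index and must not be replaced
            throw new IllegalArgumentException(
                String.format(Locale.ROOT, "cannot replace failure index [%s] of data stream [%s] because it is the failure store write index", existing, dataStream)
            );
        }
        copy.set(position, replacement);
        return List.copyOf(copy);
    }

    public static void main(String[] args) {
        List<String> indices = List.of(".fs-logs-000001", ".fs-logs-000002");
        // succeeds: replaces a non-write failure index and leaves the write index in place
        System.out.println(replaceFailureIndex("logs", indices, ".fs-logs-000001", "restored-fs-logs-000001"));
        // replacing ".fs-logs-000002" (the write index) would throw IllegalArgumentException
    }
}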
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 681dcb3e314e3..64809c963cb6d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -2267,8 +2267,9 @@ IndexMetadata build(boolean repair) { } final boolean isSearchableSnapshot = SearchableSnapshotsSettings.isSearchableSnapshotStore(settings); - final String indexMode = settings.get(IndexSettings.MODE.getKey()); - final boolean isTsdb = indexMode != null && IndexMode.TIME_SERIES.getName().equals(indexMode.toLowerCase(Locale.ROOT)); + String indexModeString = settings.get(IndexSettings.MODE.getKey()); + final IndexMode indexMode = indexModeString != null ? IndexMode.fromString(indexModeString.toLowerCase(Locale.ROOT)) : null; + final boolean isTsdb = indexMode == IndexMode.TIME_SERIES; return new IndexMetadata( new Index(index, uuid), version, @@ -2308,7 +2309,7 @@ IndexMetadata build(boolean repair) { AutoExpandReplicas.SETTING.get(settings), isSearchableSnapshot, isSearchableSnapshot && settings.getAsBoolean(SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, false), - isTsdb ? IndexMode.TIME_SERIES : null, + indexMode, isTsdb ? IndexSettings.TIME_SERIES_START_TIME.get(settings) : null, isTsdb ? IndexSettings.TIME_SERIES_END_TIME.get(settings) : null, SETTING_INDEX_VERSION_COMPATIBILITY.get(settings), diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java index 0124f23a1156d..e774d7e4d552d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java @@ -187,6 +187,9 @@ protected TokenStreamComponents createComponents(String fieldName) { () -> null, indexSettings.getMode().idFieldMapperWithoutFieldData(), scriptService, + query -> { + throw new UnsupportedOperationException("IndexMetadataVerifier"); + }, mapperMetrics ) ) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 6679f17a0427b..d62dd91d7e87b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -574,15 +574,15 @@ private static UnassignedInfo withLastAllocatedNodeId(UnassignedInfo unassignedI return previousNodes == null || previousNodes.size() <= shardCopy ? 
unassignedInfo : new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), - unassignedInfo.getLastAllocationStatus(), - unassignedInfo.getFailedNodeIds(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), + unassignedInfo.lastAllocationStatus(), + unassignedInfo.failedNodeIds(), previousNodes.get(shardCopy) ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index cf8e0608ecbd4..0b3cadb6e187c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -563,8 +563,8 @@ assert getByAllocationId(failedShard.shardId(), failedShard.allocationId().getId "primary failed while replica initializing", null, 0, - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), false, AllocationStatus.NO_ATTEMPT, Collections.emptySet(), @@ -644,11 +644,11 @@ private void unassignPrimaryAndPromoteActiveReplicaIfExists( unpromotableReplica, new UnassignedInfo( UnassignedInfo.Reason.UNPROMOTABLE_REPLICA, - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), + unassignedInfo.message(), + unassignedInfo.failure(), 0, - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), false, // TODO debatable, but do we want to delay reassignment of unpromotable replicas tho? AllocationStatus.NO_ATTEMPT, Set.of(), @@ -970,18 +970,18 @@ public void ignoreShard(ShardRouting shard, AllocationStatus allocationStatus, R ignoredPrimaries++; UnassignedInfo currInfo = shard.unassignedInfo(); assert currInfo != null; - if (allocationStatus.equals(currInfo.getLastAllocationStatus()) == false) { + if (allocationStatus.equals(currInfo.lastAllocationStatus()) == false) { UnassignedInfo newInfo = new UnassignedInfo( - currInfo.getReason(), - currInfo.getMessage(), - currInfo.getFailure(), - currInfo.getNumFailedAllocations(), - currInfo.getUnassignedTimeInNanos(), - currInfo.getUnassignedTimeInMillis(), - currInfo.isDelayed(), + currInfo.reason(), + currInfo.message(), + currInfo.failure(), + currInfo.failedAllocations(), + currInfo.unassignedTimeNanos(), + currInfo.unassignedTimeMillis(), + currInfo.delayed(), allocationStatus, - currInfo.getFailedNodeIds(), - currInfo.getLastAllocatedNodeId() + currInfo.failedNodeIds(), + currInfo.lastAllocatedNodeId() ); ShardRouting updatedShard = shard.updateUnassigned(newInfo, shard.recoverySource()); changes.unassignedInfoUpdated(shard, newInfo); @@ -1283,16 +1283,16 @@ public void resetFailedCounter(RoutingChangesObserver routingChangesObserver) { UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); unassignedIterator.updateUnassigned( new UnassignedInfo( - unassignedInfo.getNumFailedAllocations() > 0 ? 
UnassignedInfo.Reason.MANUAL_ALLOCATION : unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), + unassignedInfo.failedAllocations() > 0 ? UnassignedInfo.Reason.MANUAL_ALLOCATION : unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), 0, - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), - unassignedInfo.getLastAllocationStatus(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), + unassignedInfo.lastAllocationStatus(), Collections.emptySet(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), routingChangesObserver diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 95882e26773e5..523dc0efd450b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -342,7 +342,7 @@ public ShardRouting(ShardId shardId, StreamInput in) throws IOException { } else { recoverySource = null; } - unassignedInfo = in.readOptionalWriteable(UnassignedInfo::new); + unassignedInfo = in.readOptionalWriteable(UnassignedInfo::fromStreamInput); if (in.getTransportVersion().onOrAfter(RELOCATION_FAILURE_INFO_VERSION)) { relocationFailureInfo = RelocationFailureInfo.readFrom(in); } else { @@ -410,7 +410,7 @@ public void writeTo(StreamOutput out) throws IOException { public ShardRouting updateUnassigned(UnassignedInfo unassignedInfo, RecoverySource recoverySource) { assert this.unassignedInfo != null : "can only update unassigned info if it is already set"; - assert this.unassignedInfo.isDelayed() || (unassignedInfo.isDelayed() == false) : "cannot transition from non-delayed to delayed"; + assert this.unassignedInfo.delayed() || (unassignedInfo.delayed() == false) : "cannot transition from non-delayed to delayed"; return new ShardRouting( shardId, currentNodeId, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index bde667df3821a..9423e32be6846 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -41,9 +41,40 @@ import static org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING; /** - * Holds additional information as to why the shard is in unassigned state. + * Holds additional information as to why the shard is in an unassigned state. + * + * @param reason why the shard is unassigned. + * @param message optional details explaining the reason. + * @param failure additional failure exception details, if one exists. + * @param failedAllocations number of previously failed allocations of this shard. + * @param delayed true if allocation of this shard is delayed due to {@link #INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING}. + * @param unassignedTimeMillis The timestamp in milliseconds when the shard became unassigned, based on System.currentTimeMillis(). + * Note, we use timestamp here since we want to make sure it's preserved across node serializations. + * @param unassignedTimeNanos The timestamp in nanoseconds when the shard became unassigned, based on System.nanoTime(). 
+ * Used to calculate the delay for delayed shard allocation. + * ONLY EXPOSED FOR TESTS! + * @param lastAllocationStatus status for the last allocation attempt for this shard. + * @param failedNodeIds A set of nodeIds that failed to complete allocations for this shard. + * {@link org.elasticsearch.gateway.ReplicaShardAllocator} uses this set to avoid repeatedly canceling ongoing + * recoveries for copies on those nodes, although they can perform noop recoveries. This set will be discarded when a + * shard moves to started. And if a shard is failed while started (i.e., from started to unassigned), the currently + * assigned node won't be added to this set. + * @see org.elasticsearch.gateway.ReplicaShardAllocator#processExistingRecoveries + * @see org.elasticsearch.cluster.routing.allocation.AllocationService#applyFailedShards(ClusterState, List, List) + * @param lastAllocatedNodeId ID of the node this shard was last allocated to, or null if unavailable. */ -public final class UnassignedInfo implements ToXContentFragment, Writeable { +public record UnassignedInfo( + Reason reason, + @Nullable String message, + @Nullable Exception failure, + int failedAllocations, + long unassignedTimeNanos, + long unassignedTimeMillis, + boolean delayed, + AllocationStatus lastAllocationStatus, + Set failedNodeIds, + @Nullable String lastAllocatedNodeId +) implements ToXContentFragment, Writeable { /** * The version that the {@code lastAllocatedNode} field was added in. Used to adapt streaming of this class as appropriate for the @@ -218,17 +249,6 @@ public String value() { } } - private final Reason reason; - private final long unassignedTimeMillis; // used for display and log messages, in milliseconds - private final long unassignedTimeNanos; // in nanoseconds, used to calculate delay for delayed shard allocation - private final boolean delayed; // if allocation of this shard is delayed due to INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING - private final String message; - private final Exception failure; - private final int failedAllocations; - private final Set failedNodeIds; - private final AllocationStatus lastAllocationStatus; // result of the last allocation attempt for this shard - private final String lastAllocatedNodeId; - /** * creates an UnassignedInfo object based on **current** time * @@ -261,28 +281,10 @@ public UnassignedInfo(Reason reason, String message) { * @param failedNodeIds a set of nodeIds that failed to complete allocations for this shard * @param lastAllocatedNodeId the ID of the node this shard was last allocated to */ - public UnassignedInfo( - Reason reason, - @Nullable String message, - @Nullable Exception failure, - int failedAllocations, - long unassignedTimeNanos, - long unassignedTimeMillis, - boolean delayed, - AllocationStatus lastAllocationStatus, - Set failedNodeIds, - @Nullable String lastAllocatedNodeId - ) { - this.reason = Objects.requireNonNull(reason); - this.unassignedTimeMillis = unassignedTimeMillis; - this.unassignedTimeNanos = unassignedTimeNanos; - this.delayed = delayed; - this.message = message; - this.failure = failure; - this.failedAllocations = failedAllocations; - this.lastAllocationStatus = Objects.requireNonNull(lastAllocationStatus); - this.failedNodeIds = Set.copyOf(failedNodeIds); - this.lastAllocatedNodeId = lastAllocatedNodeId; + public UnassignedInfo { + Objects.requireNonNull(reason); + Objects.requireNonNull(lastAllocationStatus); + failedNodeIds = Set.copyOf(failedNodeIds); assert (failedAllocations > 0) == (reason == Reason.ALLOCATION_FAILED) : 
"failedAllocations: " + failedAllocations + " for reason " + reason; assert (message == null && failure != null) == false : "provide a message if a failure exception is provided"; @@ -294,24 +296,37 @@ public UnassignedInfo( : "last allocated node ID must be set if the shard is unassigned due to a node restarting"; } - public UnassignedInfo(StreamInput in) throws IOException { + public static UnassignedInfo fromStreamInput(StreamInput in) throws IOException { // Because Reason.NODE_RESTARTING is new and can't be sent by older versions, there's no need to vary the deserialization behavior - this.reason = Reason.values()[(int) in.readByte()]; - this.unassignedTimeMillis = in.readLong(); + var reason = Reason.values()[(int) in.readByte()]; + var unassignedTimeMillis = in.readLong(); // As System.nanoTime() cannot be compared across different JVMs, reset it to now. // This means that in master fail-over situations, elapsed delay time is forgotten. - this.unassignedTimeNanos = System.nanoTime(); - this.delayed = in.readBoolean(); - this.message = in.readOptionalString(); - this.failure = in.readException(); - this.failedAllocations = in.readVInt(); - this.lastAllocationStatus = AllocationStatus.readFrom(in); - this.failedNodeIds = in.readCollectionAsImmutableSet(StreamInput::readString); + var unassignedTimeNanos = System.nanoTime(); + var delayed = in.readBoolean(); + var message = in.readOptionalString(); + var failure = in.readException(); + var failedAllocations = in.readVInt(); + var lastAllocationStatus = AllocationStatus.readFrom(in); + var failedNodeIds = in.readCollectionAsImmutableSet(StreamInput::readString); + String lastAllocatedNodeId; if (in.getTransportVersion().onOrAfter(VERSION_LAST_ALLOCATED_NODE_ADDED)) { - this.lastAllocatedNodeId = in.readOptionalString(); + lastAllocatedNodeId = in.readOptionalString(); } else { - this.lastAllocatedNodeId = null; + lastAllocatedNodeId = null; } + return new UnassignedInfo( + reason, + message, + failure, + failedAllocations, + unassignedTimeNanos, + unassignedTimeMillis, + delayed, + lastAllocationStatus, + failedNodeIds, + lastAllocatedNodeId + ); } public void writeTo(StreamOutput out) throws IOException { @@ -335,107 +350,25 @@ public void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns the number of previously failed allocations of this shard. - */ - public int getNumFailedAllocations() { - return failedAllocations; - } - - /** - * Returns true if allocation of this shard is delayed due to {@link #INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING} - */ - public boolean isDelayed() { - return delayed; - } - - /** - * The reason why the shard is unassigned. - */ - public Reason getReason() { - return this.reason; - } - - /** - * The timestamp in milliseconds when the shard became unassigned, based on System.currentTimeMillis(). - * Note, we use timestamp here since we want to make sure its preserved across node serializations. - */ - public long getUnassignedTimeInMillis() { - return this.unassignedTimeMillis; - } - - /** - * The timestamp in nanoseconds when the shard became unassigned, based on System.nanoTime(). - * Used to calculate the delay for delayed shard allocation. - * ONLY EXPOSED FOR TESTS! - */ - public long getUnassignedTimeInNanos() { - return this.unassignedTimeNanos; - } - - /** - * Returns optional details explaining the reasons. - */ - @Nullable - public String getMessage() { - return this.message; - } - - /** - * Returns additional failure exception details if exists. 
- */ - @Nullable - public Exception getFailure() { - return failure; - } - /** * Builds a string representation of the message and the failure if exists. */ @Nullable - public String getDetails() { + public String details() { if (message == null) { return null; } return message + (failure == null ? "" : ", failure " + ExceptionsHelper.stackTrace(failure)); } - /** - * Gets the ID of the node this shard was last allocated to, or null if unavailable. - */ - @Nullable - public String getLastAllocatedNodeId() { - return lastAllocatedNodeId; - } - - /** - * Get the status for the last allocation attempt for this shard. - */ - public AllocationStatus getLastAllocationStatus() { - return lastAllocationStatus; - } - - /** - * A set of nodeIds that failed to complete allocations for this shard. {@link org.elasticsearch.gateway.ReplicaShardAllocator} - * uses this set to avoid repeatedly canceling ongoing recoveries for copies on those nodes although they can perform noop recoveries. - * This set will be discarded when a shard moves to started. And if a shard is failed while started (i.e., from started to unassigned), - * the currently assigned node won't be added to this set. - * - * @see org.elasticsearch.gateway.ReplicaShardAllocator#processExistingRecoveries - * @see org.elasticsearch.cluster.routing.allocation.AllocationService#applyFailedShards(ClusterState, List, List) - */ - public Set getFailedNodeIds() { - return failedNodeIds; - } - /** * Calculates the delay left based on current time (in nanoseconds) and the delay defined by the index settings. - * Only relevant if shard is effectively delayed (see {@link #isDelayed()}) + * Only relevant if shard is effectively delayed (see {@link #delayed()}) * Returns 0 if delay is negative * * @return calculated delay in nanoseconds */ - public long getRemainingDelay(final long nanoTimeNow, final Settings indexSettings, final NodesShutdownMetadata nodesShutdownMetadata) { + public long remainingDelay(final long nanoTimeNow, final Settings indexSettings, final NodesShutdownMetadata nodesShutdownMetadata) { final long indexLevelDelay = INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(indexSettings).nanos(); long delayTimeoutNanos = Optional.ofNullable(lastAllocatedNodeId) // If the node wasn't restarting when this became unassigned, use default delay @@ -455,7 +388,7 @@ public long getRemainingDelay(final long nanoTimeNow, final Settings indexSettin public static int getNumberOfDelayedUnassigned(ClusterState state) { int count = 0; for (ShardRouting shard : state.getRoutingNodes().unassigned()) { - if (shard.unassignedInfo().isDelayed()) { + if (shard.unassignedInfo().delayed()) { count++; } } @@ -472,10 +405,10 @@ public static long findNextDelayedAllocation(long currentNanoTime, ClusterState long nextDelayNanos = Long.MAX_VALUE; for (ShardRouting shard : state.getRoutingNodes().unassigned()) { UnassignedInfo unassignedInfo = shard.unassignedInfo(); - if (unassignedInfo.isDelayed()) { + if (unassignedInfo.delayed()) { Settings indexSettings = metadata.index(shard.index()).getSettings(); // calculate next time to schedule - final long newComputedLeftDelayNanos = unassignedInfo.getRemainingDelay( + final long newComputedLeftDelayNanos = unassignedInfo.remainingDelay( currentNanoTime, indexSettings, metadata.nodeShutdowns() @@ -502,7 +435,7 @@ public String shortSummary() { if (lastAllocatedNodeId != null) { sb.append(", last_node[").append(lastAllocatedNodeId).append("]"); } - String details = getDetails(); + String details = details(); if (details != null) 
{ sb.append(", details[").append(details).append("]"); } @@ -530,7 +463,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (lastAllocatedNodeId != null) { builder.field("last_node", lastAllocatedNodeId); } - String details = getDetails(); + String details = details(); if (details != null) { builder.field("details", details); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index e19e266cc2672..436399a02005f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -215,11 +215,11 @@ public ClusterState applyFailedShards( failedShard ); } - int failedAllocations = failedShard.unassignedInfo() != null ? failedShard.unassignedInfo().getNumFailedAllocations() : 0; + int failedAllocations = failedShard.unassignedInfo() != null ? failedShard.unassignedInfo().failedAllocations() : 0; final Set failedNodeIds; if (failedShard.unassignedInfo() != null) { - failedNodeIds = Sets.newHashSetWithExpectedSize(failedShard.unassignedInfo().getFailedNodeIds().size() + 1); - failedNodeIds.addAll(failedShard.unassignedInfo().getFailedNodeIds()); + failedNodeIds = Sets.newHashSetWithExpectedSize(failedShard.unassignedInfo().failedNodeIds().size() + 1); + failedNodeIds.addAll(failedShard.unassignedInfo().failedNodeIds()); failedNodeIds.add(failedShard.currentNodeId()); } else { failedNodeIds = Collections.emptySet(); @@ -425,8 +425,8 @@ default void removeDelayMarkers(RoutingAllocation allocation) { while (unassignedIterator.hasNext()) { ShardRouting shardRouting = unassignedIterator.next(); UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); - if (unassignedInfo.isDelayed()) { - final long newComputedLeftDelayNanos = unassignedInfo.getRemainingDelay( + if (unassignedInfo.delayed()) { + final long newComputedLeftDelayNanos = unassignedInfo.remainingDelay( allocation.getCurrentNanoTime(), metadata.getIndexSafe(shardRouting.index()).getSettings(), metadata.nodeShutdowns() @@ -434,16 +434,16 @@ default void removeDelayMarkers(RoutingAllocation allocation) { if (newComputedLeftDelayNanos == 0) { unassignedIterator.updateUnassigned( new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), false, - unassignedInfo.getLastAllocationStatus(), - unassignedInfo.getFailedNodeIds(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.lastAllocationStatus(), + unassignedInfo.failedNodeIds(), + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), allocation.changes() diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardChangesObserver.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardChangesObserver.java index 1b5d1875bc1d3..f265ab7f62db2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardChangesObserver.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardChangesObserver.java @@ -36,7 +36,7 @@ public void relocationStarted(ShardRouting startedShard, ShardRouting targetRelo @Override public void shardFailed(ShardRouting failedShard, UnassignedInfo unassignedInfo) { - logger.debug("{} has failed on [{}]: {}", shardIdentifier(failedShard), failedShard.currentNodeId(), unassignedInfo.getReason()); + logger.debug("{} has failed on [{}]: {}", shardIdentifier(failedShard), failedShard.currentNodeId(), unassignedInfo.reason()); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 57f28e4ea021c..2fca8895b011c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -220,19 +220,19 @@ private void failAllocationOfNewPrimaries(RoutingAllocation allocation) { while (unassignedIterator.hasNext()) { final ShardRouting shardRouting = unassignedIterator.next(); final UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); - if (shardRouting.primary() && unassignedInfo.getLastAllocationStatus() == AllocationStatus.NO_ATTEMPT) { + if (shardRouting.primary() && unassignedInfo.lastAllocationStatus() == AllocationStatus.NO_ATTEMPT) { unassignedIterator.updateUnassigned( new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), AllocationStatus.DECIDERS_NO, - unassignedInfo.getFailedNodeIds(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.failedNodeIds(), + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), allocation.changes() diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java index f0fd108dd31fd..7c04d518eb2f6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputer.java @@ -120,7 +120,7 @@ public DesiredBalance compute( for (final var iterator = unassigned.iterator(); iterator.hasNext();) { final var shardRouting = iterator.next(); if (shardRouting.primary() == primary) { - var lastAllocatedNodeId = shardRouting.unassignedInfo().getLastAllocatedNodeId(); + var lastAllocatedNodeId = shardRouting.unassignedInfo().lastAllocatedNodeId(); if (knownNodeIds.contains(lastAllocatedNodeId) || ignoredShards.contains(discardAllocationStatus(shardRouting)) == false) { shardRoutings.computeIfAbsent(shardRouting.shardId(), ShardRoutings::new).unassigned().add(shardRouting); @@ -154,7 +154,7 @@ public DesiredBalance compute( // preserving last known shard location as a starting point to avoid unnecessary relocations for (ShardRouting 
shardRouting : routings.unassigned()) { - var lastAllocatedNodeId = shardRouting.unassignedInfo().getLastAllocatedNodeId(); + var lastAllocatedNodeId = shardRouting.unassignedInfo().lastAllocatedNodeId(); if (knownNodeIds.contains(lastAllocatedNodeId)) { targetNodes.add(lastAllocatedNodeId); } @@ -346,19 +346,18 @@ public DesiredBalance compute( for (var shard : routingNodes.unassigned().ignored()) { var info = shard.unassignedInfo(); assert info != null - && (info.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO - || info.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT - || info.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED) - : "Unexpected stats in: " + info; + && (info.lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO + || info.lastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT + || info.lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED) : "Unexpected stats in: " + info; - if (hasChanges == false && info.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED) { + if (hasChanges == false && info.lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED) { // Simulation could not progress due to missing information in any of the deciders. // Currently, this could happen if `HasFrozenCacheAllocationDecider` is still fetching the data. // Progress would be made after the followup reroute call. hasChanges = true; } - var ignored = shard.unassignedInfo().getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 0 : 1; + var ignored = shard.unassignedInfo().lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 0 : 1; assignments.compute( shard.shardId(), (key, oldValue) -> oldValue == null @@ -400,20 +399,20 @@ private static ShardRouting discardAllocationStatus(ShardRouting shardRouting) { } private static UnassignedInfo discardAllocationStatus(UnassignedInfo info) { - if (info.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT) { + if (info.lastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT) { return info; } return new UnassignedInfo( - info.getReason(), - info.getMessage(), - info.getFailure(), - info.getNumFailedAllocations(), - info.getUnassignedTimeInNanos(), - info.getUnassignedTimeInMillis(), - info.isDelayed(), + info.reason(), + info.message(), + info.failure(), + info.failedAllocations(), + info.unassignedTimeNanos(), + info.unassignedTimeMillis(), + info.delayed(), UnassignedInfo.AllocationStatus.NO_ATTEMPT, - info.getFailedNodeIds(), - info.getLastAllocatedNodeId() + info.failedNodeIds(), + info.lastAllocatedNodeId() ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index f70d0b8929252..24e7abca45d2d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -226,19 +226,19 @@ private void failAllocationOfNewPrimaries(RoutingAllocation allocation) { while (unassignedIterator.hasNext()) { final ShardRouting shardRouting = unassignedIterator.next(); final UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); - if (shardRouting.primary() && 
unassignedInfo.getLastAllocationStatus() == AllocationStatus.NO_ATTEMPT) { + if (shardRouting.primary() && unassignedInfo.lastAllocationStatus() == AllocationStatus.NO_ATTEMPT) { unassignedIterator.updateUnassigned( new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), AllocationStatus.DECIDERS_NO, - unassignedInfo.getFailedNodeIds(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.failedNodeIds(), + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), allocation.changes() diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index e2fdec43d8e12..2b006988a2ae4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -126,20 +126,20 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) } UnassignedInfo unassignedInfoToUpdate = null; - if (shardRouting.unassignedInfo().getReason() != UnassignedInfo.Reason.FORCED_EMPTY_PRIMARY) { + if (shardRouting.unassignedInfo().reason() != UnassignedInfo.Reason.FORCED_EMPTY_PRIMARY) { String unassignedInfoMessage = "force empty allocation from previous reason " - + shardRouting.unassignedInfo().getReason() + + shardRouting.unassignedInfo().reason() + ", " - + shardRouting.unassignedInfo().getMessage(); + + shardRouting.unassignedInfo().message(); unassignedInfoToUpdate = new UnassignedInfo( UnassignedInfo.Reason.FORCED_EMPTY_PRIMARY, unassignedInfoMessage, - shardRouting.unassignedInfo().getFailure(), + shardRouting.unassignedInfo().failure(), 0, System.nanoTime(), System.currentTimeMillis(), false, - shardRouting.unassignedInfo().getLastAllocationStatus(), + shardRouting.unassignedInfo().lastAllocationStatus(), Collections.emptySet(), null ); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java index f37039608d7bd..1f7d1fe0143c3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java @@ -50,7 +50,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocat final int maxRetries = SETTING_ALLOCATION_MAX_RETRY.get(allocation.metadata().getIndexSafe(shardRouting.index()).getSettings()); final var unassignedInfo = shardRouting.unassignedInfo(); - final int numFailedAllocations = unassignedInfo == null ? 0 : unassignedInfo.getNumFailedAllocations(); + final int numFailedAllocations = unassignedInfo == null ? 
0 : unassignedInfo.failedAllocations(); if (numFailedAllocations > 0) { final var decision = numFailedAllocations >= maxRetries ? Decision.NO : Decision.YES; return allocation.debugDecision() ? debugDecision(decision, unassignedInfo, numFailedAllocations, maxRetries) : decision; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java index 7b08a4d94512e..7adfc2c17d4aa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java @@ -55,7 +55,7 @@ public Decision canAllocate(final ShardRouting shardRouting, final RoutingAlloca + "to restore the snapshot again or use the reroute API to force the allocation of an empty primary shard. Details: [%s]", source.snapshot(), shardRouting.getIndexName(), - shardRouting.unassignedInfo().getDetails() + shardRouting.unassignedInfo().details() ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java index 309848635a440..8fb91d89417e0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java @@ -536,15 +536,15 @@ static boolean isNewlyCreatedAndInitializingReplica(ShardRouting routing, Cluste private static boolean isUnassignedDueToTimelyRestart(ShardRouting routing, NodesShutdownMetadata shutdowns) { var info = routing.unassignedInfo(); - if (info == null || info.getReason() != UnassignedInfo.Reason.NODE_RESTARTING) { + if (info == null || info.reason() != UnassignedInfo.Reason.NODE_RESTARTING) { return false; } - var shutdown = shutdowns.get(info.getLastAllocatedNodeId(), SingleNodeShutdownMetadata.Type.RESTART); + var shutdown = shutdowns.get(info.lastAllocatedNodeId(), SingleNodeShutdownMetadata.Type.RESTART); if (shutdown == null) { return false; } var now = System.nanoTime(); - var restartingAllocationDelayExpiration = info.getUnassignedTimeInNanos() + shutdown.getAllocationDelay().nanos(); + var restartingAllocationDelayExpiration = info.unassignedTimeNanos() + shutdown.getAllocationDelay().nanos(); return now - restartingAllocationDelayExpiration <= 0; } @@ -567,10 +567,10 @@ private static boolean isUnassignedDueToNewInitialization(ShardRouting routing, List diagnoseUnassignedShardRouting(ShardRouting shardRouting, ClusterState state) { List diagnosisDefs = new ArrayList<>(); LOGGER.trace("Diagnosing unassigned shard [{}] due to reason [{}]", shardRouting.shardId(), shardRouting.unassignedInfo()); - switch (shardRouting.unassignedInfo().getLastAllocationStatus()) { + switch (shardRouting.unassignedInfo().lastAllocationStatus()) { case NO_VALID_SHARD_COPY -> diagnosisDefs.add(ACTION_RESTORE_FROM_SNAPSHOT); case NO_ATTEMPT -> { - if (shardRouting.unassignedInfo().isDelayed()) { + if (shardRouting.unassignedInfo().delayed()) { diagnosisDefs.add(DIAGNOSIS_WAIT_FOR_OR_FIX_DELAYED_SHARDS); } else { diagnosisDefs.addAll(explainAllocationsAndDiagnoseDeciders(shardRouting, state)); diff --git 
a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 33d8fbf99f31f..cce61f5ff55e3 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -14,8 +14,6 @@ import com.carrotsearch.hppc.procedures.ObjectObjectProcedure; import com.carrotsearch.hppc.procedures.ObjectProcedure; -import org.elasticsearch.common.util.Maps; - import java.util.AbstractCollection; import java.util.AbstractMap; import java.util.AbstractSet; @@ -146,7 +144,7 @@ public boolean isEmpty() { @Override public Iterator> iterator() { - return Iterators.map(map.iterator(), c -> new Maps.ImmutableEntry<>(c.key, c.value)); + return Iterators.map(map.iterator(), c -> new AbstractMap.SimpleImmutableEntry<>(c.key, c.value)); } @Override @@ -156,7 +154,9 @@ public Spliterator> spliterator() { @Override public void forEach(Consumer> action) { - map.forEach((Consumer>) c -> action.accept(new Maps.ImmutableEntry<>(c.key, c.value))); + map.forEach( + (Consumer>) c -> action.accept(new AbstractMap.SimpleImmutableEntry<>(c.key, c.value)) + ); } @SuppressWarnings("unchecked") diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 33fb000c1bca2..833e7f27852c8 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -370,11 +370,12 @@ public void writeOptionalText(@Nullable Text text) throws IOException { } } - private final BytesRefBuilder spare = new BytesRefBuilder(); + private static final ThreadLocal spareBytesRefBuilder = ThreadLocal.withInitial(BytesRefBuilder::new); public void writeText(Text text) throws IOException { if (text.hasBytes() == false) { final String string = text.string(); + var spare = spareBytesRefBuilder.get(); spare.copyChars(string); writeInt(spare.length()); write(spare.bytes(), 0, spare.length()); diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java index b1a01553ef1bd..f5a52cd1c4ce9 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -85,6 +85,7 @@ public interface CustomNameResolver { private final List customNameResolvers; private final HandlingTimeTracker handlingTimeTracker = new HandlingTimeTracker(); + private final ThreadWatchdog threadWatchdog = new ThreadWatchdog(); public NetworkService(List customNameResolvers) { this.customNameResolvers = Objects.requireNonNull(customNameResolvers, "customNameResolvers must be non null"); @@ -94,6 +95,10 @@ public HandlingTimeTracker getHandlingTimeTracker() { return handlingTimeTracker; } + public ThreadWatchdog getThreadWatchdog() { + return threadWatchdog; + } + /** * Resolves {@code bindHosts} to a list of internet addresses. The list will * not contain duplicate addresses. 
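NetworkService now owns a single ThreadWatchdog instance (the new class below) that transport implementations can share via getThreadWatchdog(). A minimal sketch of the intended usage on an event-loop read path, bracketing message processing with the per-thread ActivityTracker; the handler class and readMessage callback are hypothetical stand-ins, and only the ThreadWatchdog API comes from this change (the real wiring into the transport implementations is not shown in this section):

import org.elasticsearch.common.network.ThreadWatchdog;

// Hypothetical inbound handler illustrating the bracketing pattern.
class InboundReadHandlerSketch {
    private final ThreadWatchdog threadWatchdog;

    InboundReadHandlerSketch(ThreadWatchdog threadWatchdog) {
        this.threadWatchdog = threadWatchdog; // e.g. networkService.getThreadWatchdog()
    }

    void onBytesReceived(Runnable readMessage) {
        // trackers are created lazily, one per network thread, and registered with the watchdog
        ThreadWatchdog.ActivityTracker tracker = threadWatchdog.getActivityTrackerForCurrentThread();
        tracker.startActivity();
        try {
            // if this blocks long enough to be observed unchanged by two consecutive
            // watchdog checks, the thread is reported as stuck
            readMessage.run();
        } finally {
            tracker.stopActivity();
        }
    }
}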
diff --git a/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java b/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java new file mode 100644 index 0000000000000..90d4d2493de89 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/network/ThreadWatchdog.java @@ -0,0 +1,280 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.ReferenceDocs; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.monitor.jvm.HotThreads; +import org.elasticsearch.threadpool.ThreadPool; + +import java.lang.ref.WeakReference; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Watchdog mechanism for making sure that no transport thread spends too long blocking the event loop. + */ +// Today we only use this to track activity processing reads on network threads. Tracking time when we're busy processing writes is a little +// trickier because that code is more re-entrant, both within the network layer and also it may complete a listener from the wider codebase +// that ends up calling back into the network layer again. But also we don't see many network threads blocking for ages on the write path, +// so we focus on reads for now. +public class ThreadWatchdog { + + public static final Setting NETWORK_THREAD_WATCHDOG_INTERVAL = Setting.timeSetting( + "network.thread.watchdog.interval", + TimeValue.timeValueSeconds(5), + Setting.Property.NodeScope + ); + + public static final Setting NETWORK_THREAD_WATCHDOG_QUIET_TIME = Setting.timeSetting( + "network.thread.watchdog.quiet_time", + TimeValue.timeValueMinutes(10), + Setting.Property.NodeScope + ); + + private static final Logger logger = LogManager.getLogger(ThreadWatchdog.class); + + /** + * Activity tracker for the current thread. Thread-locals are only retained by the owning thread so these will be GCd after thread exit. + */ + private final ThreadLocal activityTrackerThreadLocal = new ThreadLocal<>(); + + /** + * Collection of known activity trackers to be scanned for stuck threads. Uses {@link WeakReference} so that we don't prevent trackers + * from being GCd if a thread exits. There aren't many such trackers, O(#cpus), and they almost never change, so an {@link ArrayList} + * with explicit synchronization is fine. + */ + private final List> knownTrackers = new ArrayList<>(); + + /** + * @return an activity tracker for activities on the current thread. 
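The knownTrackers list above leans on a subtle idiom: trackers are held through WeakReference so a tracker owned by an exited thread can be garbage collected, and the periodic scan prunes cleared references as it goes. A self-contained sketch of that idiom, with illustrative names rather than the ThreadWatchdog types:

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class WeakRegistry<T> {
    private final List<WeakReference<T>> entries = new ArrayList<>();

    public synchronized void register(T item) {
        entries.add(new WeakReference<>(item));
    }

    // Returns the still-live items, dropping references whose referent was
    // collected, so the registry cannot grow without bound over time.
    public synchronized List<T> liveEntries() {
        List<T> live = new ArrayList<>();
        Iterator<WeakReference<T>> it = entries.iterator();
        while (it.hasNext()) {
            T item = it.next().get();
            if (item == null) {
                it.remove(); // owner went away; prune during the scan
            } else {
                live.add(item);
            }
        }
        return live;
    }
}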
+ */ + public ActivityTracker getActivityTrackerForCurrentThread() { + var result = activityTrackerThreadLocal.get(); + if (result == null) { + // this is a previously-untracked thread; thread creation is assumed to be very rare, no need to optimize this path at all + result = new ActivityTracker(); + synchronized (knownTrackers) { + knownTrackers.add(new WeakReference<>(result)); + } + activityTrackerThreadLocal.set(result); + } + return result; + } + + // exposed for testing + List getStuckThreadNames() { + List stuckThreadNames = null; + // this is not called very often, and only on a single thread, with almost no contention on this mutex since thread creation is rare + synchronized (knownTrackers) { + final var iterator = knownTrackers.iterator(); + while (iterator.hasNext()) { + final var tracker = iterator.next().get(); + if (tracker == null) { + // tracker was GCd because its thread exited - very rare, no need to optimize this case + iterator.remove(); + } else if (tracker.isIdleOrMakingProgress() == false) { + if (stuckThreadNames == null) { + stuckThreadNames = new ArrayList<>(); + } + stuckThreadNames.add(tracker.getTrackedThreadName()); + } + } + } + if (stuckThreadNames == null) { + return List.of(); + } else { + stuckThreadNames.sort(Comparator.naturalOrder()); + return stuckThreadNames; + } + } + + /** + * Per-thread class which keeps track of activity on that thread, represented as a {@code long} which is incremented every time an + * activity starts or stops. Thus the parity of its value indicates whether the thread is idle or not. Crucially, the activity tracking + * is very lightweight (on the tracked thread). + */ + public static final class ActivityTracker extends AtomicLong { + + private final Thread trackedThread; + private long lastObservedValue; + + public ActivityTracker() { + this.trackedThread = Thread.currentThread(); + } + + String getTrackedThreadName() { + return trackedThread.getName(); + } + + public void startActivity() { + assert trackedThread == Thread.currentThread() : trackedThread.getName() + " vs " + Thread.currentThread().getName(); + final var prevValue = getAndIncrement(); + assert isIdle(prevValue) : "thread [" + trackedThread.getName() + "] was already active"; + } + + public void stopActivity() { + assert trackedThread == Thread.currentThread() : trackedThread.getName() + " vs " + Thread.currentThread().getName(); + final var prevValue = getAndIncrement(); + assert isIdle(prevValue) == false : "thread [" + trackedThread.getName() + "] was already idle"; + } + + boolean isIdleOrMakingProgress() { + final var value = get(); + if (isIdle(value)) { + return true; + } + if (value == lastObservedValue) { + // no change since last check + return false; + } else { + // made progress since last check + lastObservedValue = value; + return true; + } + } + + private static boolean isIdle(long value) { + // the parity of the value indicates the idle state: initially zero (idle), so active == odd + return (value & 1) == 0; + } + } + + public void run(Settings settings, ThreadPool threadPool, Lifecycle lifecycle) { + new Checker(threadPool, NETWORK_THREAD_WATCHDOG_INTERVAL.get(settings), NETWORK_THREAD_WATCHDOG_QUIET_TIME.get(settings), lifecycle) + .run(); + } + + /** + * Action which runs itself periodically, calling {@link #getStuckThreadNames} to check for active threads that didn't make progress + * since the last call, and if it finds any then it dispatches {@link #threadDumper} to log the current hot threads. 
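The parity trick described in the ActivityTracker javadoc above is worth seeing in isolation: one counter, incremented on both start and stop, encodes idle/active in its low bit and progress in its value. The toy class below mirrors that logic under an illustrative name; it is a distillation for clarity, not the Elasticsearch class:

import java.util.concurrent.atomic.AtomicLong;

public class ParityTracker extends AtomicLong {
    private long lastObservedValue;

    public void startActivity() {
        getAndIncrement(); // even -> odd: now active
    }

    public void stopActivity() {
        getAndIncrement(); // odd -> even: now idle
    }

    // Called from the checker thread: idle counts as fine, and an active
    // thread is fine only if the counter moved since the previous check.
    public boolean isIdleOrMakingProgress() {
        long value = get();
        if ((value & 1) == 0) {
            return true; // even: idle
        }
        if (value == lastObservedValue) {
            return false; // still inside the same activity: stuck
        }
        lastObservedValue = value;
        return true;
    }

    public static void main(String[] args) {
        ParityTracker tracker = new ParityTracker();
        tracker.startActivity();
        System.out.println(tracker.isIdleOrMakingProgress()); // true, first observation
        System.out.println(tracker.isIdleOrMakingProgress()); // false, no progress while active
        tracker.stopActivity();
        System.out.println(tracker.isIdleOrMakingProgress()); // true, idle again
    }
}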
+ */ + private final class Checker extends AbstractRunnable { + private final ThreadPool threadPool; + private final TimeValue interval; + private final TimeValue quietTime; + private final Lifecycle lifecycle; + + Checker(ThreadPool threadPool, TimeValue interval, TimeValue quietTime, Lifecycle lifecycle) { + this.threadPool = threadPool; + this.interval = interval; + this.quietTime = quietTime.compareTo(interval) <= 0 ? interval : quietTime; + this.lifecycle = lifecycle; + assert this.interval.millis() <= this.quietTime.millis(); + } + + @Override + protected void doRun() { + if (isRunning() == false) { + return; + } + + boolean rescheduleImmediately = true; + try { + final var stuckThreadNames = getStuckThreadNames(); + if (stuckThreadNames.isEmpty() == false) { + logger.warn( + "the following threads are active but did not make progress in the preceding [{}]: {}", + interval, + stuckThreadNames + ); + rescheduleImmediately = false; + threadPool.generic().execute(threadDumper); + } + } finally { + if (rescheduleImmediately) { + scheduleNext(interval); + } + } + } + + @Override + public boolean isForceExecution() { + return true; + } + + private boolean isRunning() { + return 0 < interval.millis() && lifecycle.stoppedOrClosed() == false; + } + + private void scheduleNext(TimeValue delay) { + if (isRunning()) { + threadPool.scheduleUnlessShuttingDown(delay, EsExecutors.DIRECT_EXECUTOR_SERVICE, Checker.this); + } + } + + private final AbstractRunnable threadDumper = new AbstractRunnable() { + @Override + protected void doRun() { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GENERIC); + if (isRunning()) { + HotThreads.logLocalHotThreads( + logger, + Level.WARN, + "hot threads dump due to active threads not making progress", + ReferenceDocs.NETWORK_THREADING_MODEL + ); + } + } + + @Override + public boolean isForceExecution() { + return true; + } + + @Override + public void onFailure(Exception e) { + Checker.this.onFailure(e); + } + + @Override + public void onRejection(Exception e) { + Checker.this.onRejection(e); + } + + @Override + public void onAfter() { + scheduleNext(quietTime); + } + + @Override + public String toString() { + return "ThreadWatchDog$Checker#threadDumper"; + } + }; + + @Override + public void onFailure(Exception e) { + logger.error("exception in ThreadWatchDog$Checker", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + logger.debug("ThreadWatchDog$Checker execution rejected", e); + assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; + } + + @Override + public String toString() { + return "ThreadWatchDog$Checker"; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 2ea0cc60b5afd..90609adabfbc5 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -60,6 +60,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -421,6 +422,8 @@ public void apply(Settings value, Settings current, Settings previous) { 
NetworkService.TCP_REUSE_ADDRESS, NetworkService.TCP_SEND_BUFFER_SIZE, NetworkService.TCP_RECEIVE_BUFFER_SIZE, + ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL, + ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME, IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, ScriptService.SCRIPT_CACHE_SIZE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index fc911793711b7..8e36cd250f7b7 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -329,39 +329,4 @@ public static Map transformValues(Map source, Function return copy; } - /** - * An immutable implementation of {@link Map.Entry}. - * Unlike {@code Map.entry(...)} this implementation permits null key and value. - */ - public record ImmutableEntry(KType key, VType value) implements Map.Entry { - - @Override - public KType getKey() { - return key; - } - - @Override - public VType getValue() { - return value; - } - - @Override - public VType setValue(VType value) { - throw new UnsupportedOperationException(); - } - - @Override - @SuppressWarnings("rawtypes") - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof Map.Entry) == false) return false; - Map.Entry that = (Map.Entry) o; - return Objects.equals(key, that.getKey()) && Objects.equals(value, that.getValue()); - } - - @Override - public int hashCode() { - return Objects.hashCode(key) ^ Objects.hashCode(value); - } - } } diff --git a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index fa9636dc89d69..d07d2498d6534 100644 --- a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -65,7 +65,7 @@ public void processExistingRecoveries(RoutingAllocation allocation, Predicate failedNodeIds = shard.unassignedInfo() == null ? Collections.emptySet() - : shard.unassignedInfo().getFailedNodeIds(); + : shard.unassignedInfo().failedNodeIds(); UnassignedInfo unassignedInfo = new UnassignedInfo( UnassignedInfo.Reason.REALLOCATED_REPLICA, "existing allocation of replica to [" @@ -138,7 +138,7 @@ private static boolean isResponsibleFor(final ShardRouting shard) { return shard.primary() == false // must be a replica && shard.unassigned() // must be unassigned // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... 
- && shard.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED; + && shard.unassignedInfo().reason() != UnassignedInfo.Reason.INDEX_CREATED; } @Override @@ -234,7 +234,7 @@ public AllocateUnassignedDecision makeAllocationDecision( // we found a match return AllocateUnassignedDecision.yes(nodeWithHighestMatch.node(), null, nodeDecisions, true); } - } else if (matchingNodes.hasAnyData() == false && unassignedShard.unassignedInfo().isDelayed()) { + } else if (matchingNodes.hasAnyData() == false && unassignedShard.unassignedInfo().delayed()) { // if we didn't manage to find *any* data (regardless of matching sizes), and the replica is // unassigned due to a node leaving, so we delay allocation of this replica to see if the // node with the shard copy will rejoin so we can re-use the copy it has @@ -262,7 +262,7 @@ public static AllocateUnassignedDecision delayedDecision( Metadata metadata = allocation.metadata(); IndexMetadata indexMetadata = metadata.index(unassignedShard.index()); totalDelayMillis = INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(indexMetadata.getSettings()).getMillis(); - long remainingDelayNanos = unassignedInfo.getRemainingDelay( + long remainingDelayNanos = unassignedInfo.remainingDelay( System.nanoTime(), indexMetadata.getSettings(), metadata.nodeShutdowns() @@ -357,7 +357,7 @@ private MatchingNodes findMatchingNodes( DiscoveryNode discoNode = nodeStoreEntry.getKey(); if (noMatchFailedNodes && shard.unassignedInfo() != null - && shard.unassignedInfo().getFailedNodeIds().contains(discoNode.getId())) { + && shard.unassignedInfo().failedNodeIds().contains(discoNode.getId())) { continue; } TransportNodesListShardStoreMetadata.StoreFilesMetadata storeFilesMetadata = nodeStoreEntry.getValue().storeFilesMetadata(); diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index ff8db4bacef8c..fa2a9f0f35259 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -652,6 +652,9 @@ public MapperService newIndexMapperService( }, indexSettings.getMode().idFieldMapperWithoutFieldData(), scriptService, + query -> { + throw new UnsupportedOperationException("no index query shard context available"); + }, mapperMetrics ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 1712f824a132c..0605e36b2ea4b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -212,6 +212,7 @@ public IndexService( this.indexAnalyzers = indexAnalyzers; if (needsMapperService(indexSettings, indexCreationContext)) { assert indexAnalyzers != null; + this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); this.mapperService = new MapperService( clusterService, indexSettings, @@ -223,6 +224,7 @@ public IndexService( () -> newSearchExecutionContext(0, 0, null, System::currentTimeMillis, null, emptyMap()), idFieldMapper, scriptService, + bitsetFilterCache::getBitSetProducer, mapperMetrics ); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService); @@ -238,7 +240,6 @@ public IndexService( this.indexSortSupplier = () -> null; } indexFieldData.setListener(new FieldDataCacheListener(this)); - this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new 
BitsetCacheListener(this)); this.warmer = new IndexWarmer(threadPool, indexFieldData, bitsetFilterCache.createListener(threadPool)); this.indexCache = new IndexCache(queryCache, bitsetFilterCache); } else { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index aa92025f32428..5446027a2ca40 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -284,7 +284,7 @@ public final class IndexSettings { TimeValue.MINUS_ONE, Property.NodeScope ); // TODO: remove setting - public static TimeValue STATELESS_DEFAULT_REFRESH_INTERVAL = TimeValue.timeValueSeconds(15); // TODO: this value is still not final + public static TimeValue STATELESS_DEFAULT_REFRESH_INTERVAL = TimeValue.timeValueSeconds(5); // TODO: this value is still not final public static TimeValue STATELESS_MIN_NON_FAST_REFRESH_INTERVAL = TimeValue.timeValueSeconds(5); public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", (settings) -> { if (EXISTING_SHARDS_ALLOCATOR_SETTING.get(settings).equals("stateless") && INDEX_FAST_REFRESH_SETTING.get(settings) == false) { diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 0c28601646ac3..b50545efef893 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -306,7 +306,7 @@ private GetResult innerGetFetch( Map metadataFields = null; DocIdAndVersion docIdAndVersion = get.docIdAndVersion(); SourceLoader loader = forceSyntheticSource - ? new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) + ? 
new SourceLoader.Synthetic(mappingLookup.getMapping()::syntheticFieldLoader, mapperMetrics.sourceFieldMetrics()) : mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); StoredFieldLoader storedFieldLoader = buildStoredFieldLoader(storedFields, fetchSourceContext, loader); LeafStoredFieldLoader leafStoredFieldLoader = storedFieldLoader.getLoader(docIdAndVersion.reader.getContext(), null); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index a89a89472a678..3d4f0823bb1cf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -965,6 +965,19 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) { protected String contentType() { throw new UnsupportedOperationException(); } + + @Override + protected SyntheticSourceMode syntheticSourceMode() { + // Opt out of fallback synthetic source implementation + // since there is custom logic in #parseCreateField() + return SyntheticSourceMode.NATIVE; + } + + @Override + public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { + // Handled via IgnoredSourceFieldMapper infrastructure + return SourceLoader.SyntheticFieldLoader.NOTHING; + } }; private static class NoOpObjectMapper extends ObjectMapper { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 040963b0f8bad..4338a62d79ab9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -443,27 +443,36 @@ public Map indexAnalyzers() { * Specifies the mode of synthetic source support by the mapper. * *
-     * {@link NATIVE} - mapper natively supports synthetic source, f.e. by constructing it from doc values.
+     * {@link SyntheticSourceMode#NATIVE} - mapper natively supports synthetic source, f.e. by constructing it from doc values.
      *
-     * {@link FALLBACK} - mapper does not have native support but uses fallback implementation.
-     * This is a temporary variant that exists in order to roll out fallback implementation on a per field basis.
-     *
-     * {@link NOT_SUPPORTED} - synthetic source is not supported.
+     * {@link SyntheticSourceMode#FALLBACK} - mapper does not have native support and uses generic fallback implementation
+     * that stores raw input source data as is.
      * </pre>
*/ protected enum SyntheticSourceMode { NATIVE, - FALLBACK, - NOT_SUPPORTED + FALLBACK } /** + * <p> * Specifies the mode of synthetic source support by the mapper. - * + * <br> + * This is used to determine if a field mapper has support for + * constructing synthetic source. + * In case it doesn't (meaning {@link SyntheticSourceMode#FALLBACK}), + * we will store raw source data for this field as is + * and then use it for synthetic source. + * </p> + * <p> + * Field mappers must override this method if they provide + * a custom implementation of {@link #syntheticFieldLoader()} + * in order to use a more efficient field-specific implementation. + * </p>
* @return {@link SyntheticSourceMode} */ protected SyntheticSourceMode syntheticSourceMode() { - return SyntheticSourceMode.NOT_SUPPORTED; + return SyntheticSourceMode.FALLBACK; } /** @@ -476,7 +485,7 @@ protected SyntheticSourceMode syntheticSourceMode() { @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { // If mapper supports synthetic source natively, it overrides this method, - // /so we won't see those here. + // so we won't see those here. if (syntheticSourceMode() == SyntheticSourceMode.FALLBACK) { if (copyTo.copyToFields().isEmpty() != true) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index d3665c3b978bd..e5dc95ddbc2a0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; @@ -167,6 +169,7 @@ public MapperService( Supplier searchExecutionContextSupplier, IdFieldMapper idFieldMapper, ScriptCompiler scriptCompiler, + Function bitSetProducer, MapperMetrics mapperMetrics ) { this( @@ -179,6 +182,7 @@ public MapperService( searchExecutionContextSupplier, idFieldMapper, scriptCompiler, + bitSetProducer, mapperMetrics ); } @@ -194,6 +198,7 @@ public MapperService( Supplier searchExecutionContextSupplier, IdFieldMapper idFieldMapper, ScriptCompiler scriptCompiler, + Function bitSetProducer, MapperMetrics mapperMetrics ) { super(indexSettings); @@ -210,7 +215,8 @@ public MapperService( scriptCompiler, indexAnalyzers, indexSettings, - idFieldMapper + idFieldMapper, + bitSetProducer ); this.documentParser = new DocumentParser(parserConfiguration, this.mappingParserContextSupplier.get()); Map metadataMapperParsers = mapperRegistry.getMetadataMapperParsers( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParserContext.java index 88df87859ccc2..3f614d4346fd4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParserContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -37,6 +39,7 @@ public class MappingParserContext { private final IndexAnalyzers indexAnalyzers; private final IndexSettings indexSettings; private final IdFieldMapper idFieldMapper; + private final Function bitSetProducer; private final long mappingObjectDepthLimit; private long mappingObjectDepth = 0; @@ -50,7 +53,8 @@ public MappingParserContext( ScriptCompiler scriptCompiler, IndexAnalyzers indexAnalyzers, IndexSettings indexSettings, - IdFieldMapper idFieldMapper + IdFieldMapper idFieldMapper, + Function bitSetProducer ) { this.similarityLookupService = similarityLookupService; this.typeParsers = typeParsers; @@ -63,6 +67,7 @@ public MappingParserContext( this.indexSettings = indexSettings; 
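The FieldMapper change above inverts the old default: instead of NOT_SUPPORTED, every mapper now at least gets the generic fallback that replays the stored raw input, and only mappers that can rebuild values more cheaply (for example from doc values) declare NATIVE and override the loader. A toy model of that contract, using placeholder types rather than the Elasticsearch classes:

enum SketchMode {
    NATIVE,
    FALLBACK
}

interface SketchFieldLoader {
    String load();
}

abstract class MapperSketch {
    // New default: no mapper is left without synthetic source support.
    protected SketchMode syntheticSourceMode() {
        return SketchMode.FALLBACK;
    }

    SketchFieldLoader syntheticFieldLoader() {
        if (syntheticSourceMode() == SketchMode.FALLBACK) {
            return () -> "value replayed from the stored raw input";
        }
        // NATIVE mappers are expected to override this method entirely.
        throw new IllegalStateException("a NATIVE mapper must override syntheticFieldLoader()");
    }
}

class DocValuesBackedMapperSketch extends MapperSketch {
    @Override
    protected SketchMode syntheticSourceMode() {
        return SketchMode.NATIVE;
    }

    @Override
    SketchFieldLoader syntheticFieldLoader() {
        return () -> "value rebuilt from doc values";
    }
}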
this.idFieldMapper = idFieldMapper; this.mappingObjectDepthLimit = indexSettings.getMappingDepthLimit(); + this.bitSetProducer = bitSetProducer; } public IndexAnalyzers getIndexAnalyzers() { @@ -132,6 +137,10 @@ public ScriptCompiler scriptCompiler() { return scriptCompiler; } + public BitSetProducer bitSetProducer(Query query) { + return bitSetProducer.apply(query); + } + void incrementMappingObjectDepth() throws MapperParsingException { mappingObjectDepth++; if (mappingObjectDepth > mappingObjectDepthLimit) { @@ -159,7 +168,8 @@ private static class MultiFieldParserContext extends MappingParserContext { in.scriptCompiler, in.indexAnalyzers, in.indexSettings, - in.idFieldMapper + in.idFieldMapper, + in.bitSetProducer ); } @@ -188,7 +198,8 @@ private static class DynamicTemplateParserContext extends MappingParserContext { in.scriptCompiler, in.indexAnalyzers, in.indexSettings, - in.idFieldMapper + in.idFieldMapper, + in.bitSetProducer ); this.dateFormatter = dateFormatter; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index e98b8ba7deba1..65748847406ea 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -9,7 +9,9 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -18,6 +20,7 @@ import java.io.IOException; import java.util.Locale; import java.util.Map; +import java.util.function.Function; /** * A Mapper for nested objects @@ -31,10 +34,12 @@ public static class Builder extends ObjectMapper.Builder { private Explicit includeInRoot = Explicit.IMPLICIT_FALSE; private Explicit includeInParent = Explicit.IMPLICIT_FALSE; private final IndexVersion indexCreatedVersion; + private final Function bitsetProducer; - public Builder(String name, IndexVersion indexCreatedVersion) { + public Builder(String name, IndexVersion indexCreatedVersion, Function bitSetProducer) { super(name, Explicit.IMPLICIT_TRUE); this.indexCreatedVersion = indexCreatedVersion; + this.bitsetProducer = bitSetProducer; } Builder includeInRoot(boolean includeInRoot) { @@ -50,24 +55,21 @@ Builder includeInParent(boolean includeInParent) { @Override public NestedObjectMapper build(MapperBuilderContext context) { boolean parentIncludedInRoot = this.includeInRoot.value(); + final Query parentTypeFilter; if (context instanceof NestedMapperBuilderContext nc) { // we're already inside a nested mapper, so adjust our includes if (nc.parentIncludedInRoot && this.includeInParent.value()) { this.includeInRoot = Explicit.IMPLICIT_FALSE; } + parentTypeFilter = nc.nestedTypeFilter; } else { // this is a top-level nested mapper, so include_in_parent = include_in_root parentIncludedInRoot |= this.includeInParent.value(); if (this.includeInParent.value()) { this.includeInRoot = Explicit.IMPLICIT_FALSE; } + parentTypeFilter = Queries.newNonNestedFilter(indexCreatedVersion); } - NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( - context.buildFullName(name()), - parentIncludedInRoot, - context.getDynamic(dynamic), - context.getMergeReason() - ); 
final String fullPath = context.buildFullName(name()); final String nestedTypePath; if (indexCreatedVersion.before(IndexVersions.V_8_0_0)) { @@ -75,6 +77,14 @@ public NestedObjectMapper build(MapperBuilderContext context) { } else { nestedTypePath = fullPath; } + final Query nestedTypeFilter = NestedPathFieldMapper.filter(indexCreatedVersion, nestedTypePath); + NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( + context.buildFullName(name()), + nestedTypeFilter, + parentIncludedInRoot, + context.getDynamic(dynamic), + context.getMergeReason() + ); return new NestedObjectMapper( name(), fullPath, @@ -83,8 +93,10 @@ public NestedObjectMapper build(MapperBuilderContext context) { dynamic, includeInParent, includeInRoot, + parentTypeFilter, nestedTypePath, - NestedPathFieldMapper.filter(indexCreatedVersion, nestedTypePath) + nestedTypeFilter, + bitsetProducer ); } } @@ -96,7 +108,11 @@ public Mapper.Builder parse(String name, Map node, MappingParser if (parseSubobjects(node).explicit()) { throw new MapperParsingException("Nested type [" + name + "] does not support [subobjects] parameter"); } - NestedObjectMapper.Builder builder = new NestedObjectMapper.Builder(name, parserContext.indexVersionCreated()); + NestedObjectMapper.Builder builder = new NestedObjectMapper.Builder( + name, + parserContext.indexVersionCreated(), + parserContext::bitSetProducer + ); parseNested(name, node, builder); parseObjectFields(node, parserContext, builder); return builder; @@ -119,24 +135,43 @@ protected static void parseNested(String name, Map node, NestedO } private static class NestedMapperBuilderContext extends MapperBuilderContext { - final boolean parentIncludedInRoot; - - NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic, MapperService.MergeReason mergeReason) { + final Query nestedTypeFilter; + + NestedMapperBuilderContext( + String path, + Query nestedTypeFilter, + boolean parentIncludedInRoot, + Dynamic dynamic, + MapperService.MergeReason mergeReason + ) { super(path, false, false, false, dynamic, mergeReason); this.parentIncludedInRoot = parentIncludedInRoot; + this.nestedTypeFilter = nestedTypeFilter; } @Override public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { - return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic), getMergeReason()); + return new NestedMapperBuilderContext( + buildFullName(name), + nestedTypeFilter, + parentIncludedInRoot, + getDynamic(dynamic), + getMergeReason() + ); } } private final Explicit includeInRoot; private final Explicit includeInParent; + // The query to identify parent documents + private final Query parentTypeFilter; + // The path of the nested field private final String nestedTypePath; + // The query to identify nested documents at this level private final Query nestedTypeFilter; + // Function to create a bitset for identifying parent documents + private final Function bitsetProducer; NestedObjectMapper( String name, @@ -146,14 +181,22 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { ObjectMapper.Dynamic dynamic, Explicit includeInParent, Explicit includeInRoot, + Query parentTypeFilter, String nestedTypePath, - Query nestedTypeFilter + Query nestedTypeFilter, + Function bitsetProducer ) { super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_FALSE, dynamic, mappers); + this.parentTypeFilter = parentTypeFilter; this.nestedTypePath = nestedTypePath; this.nestedTypeFilter = 
nestedTypeFilter; this.includeInParent = includeInParent; this.includeInRoot = includeInRoot; + this.bitsetProducer = bitsetProducer; + } + + public Query parentTypeFilter() { + return parentTypeFilter; } public Query nestedTypeFilter() { @@ -177,13 +220,17 @@ public boolean isIncludeInRoot() { return this.includeInRoot.value(); } + public Function bitsetProducer() { + return bitsetProducer; + } + public Map getChildren() { return this.mappers; } @Override public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { - NestedObjectMapper.Builder builder = new NestedObjectMapper.Builder(simpleName(), indexVersionCreated); + NestedObjectMapper.Builder builder = new NestedObjectMapper.Builder(simpleName(), indexVersionCreated, bitsetProducer); builder.enabled = enabled; builder.dynamic = dynamic; builder.includeInRoot = includeInRoot; @@ -201,8 +248,10 @@ NestedObjectMapper withoutMappers() { dynamic, includeInParent, includeInRoot, + parentTypeFilter, nestedTypePath, - nestedTypeFilter + nestedTypeFilter, + bitsetProducer ); } @@ -270,8 +319,10 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex mergeResult.dynamic(), incInParent, incInRoot, + parentTypeFilter, nestedTypePath, - nestedTypeFilter + nestedTypeFilter, + bitsetProducer ); } @@ -285,6 +336,7 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo return mapperMergeContext.createChildContext( new NestedMapperBuilderContext( mapperBuilderContext.buildFullName(name), + nestedTypeFilter, parentIncludedInRoot, mapperBuilderContext.getDynamic(dynamic), mapperBuilderContext.getMergeReason() diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index b819ffb0ef6ad..67e457907f8cc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -69,6 +69,14 @@ private enum Mode { IndexMode.TIME_SERIES ); + private static final SourceFieldMapper LOGS_DEFAULT = new SourceFieldMapper( + Mode.SYNTHETIC, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + IndexMode.LOGS + ); + /* * Synthetic source was added as the default for TSDB in v.8.7. The legacy field mapper below * is used in bwc tests and mixed clusters containing time series indexes created in an earlier version. 
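The NestedObjectMapper changes above thread a Query -> BitSetProducer factory (passed in as parserContext::bitSetProducer, ultimately backed by the BitsetFilterCache wired up earlier in IndexService) from parsing time down to the built mapper, alongside the new parentTypeFilter. A small sketch of that wiring under placeholder types (Query and BitSetProducer here are stand-ins, not the Lucene classes):

import java.util.function.Function;

public class NestedWiringSketch {
    record Query(String description) {}

    interface BitSetProducer {} // stands in for a cached per-segment bitset source

    static class NestedMapperSketch {
        private final Query parentTypeFilter;
        private final Function<Query, BitSetProducer> bitsetProducer;

        NestedMapperSketch(Query parentTypeFilter, Function<Query, BitSetProducer> bitsetProducer) {
            this.parentTypeFilter = parentTypeFilter;
            this.bitsetProducer = bitsetProducer;
        }

        // Resolved lazily at use time against whatever cache the factory
        // closes over, so the mapper itself stays cache-agnostic.
        BitSetProducer parentBitSets() {
            return bitsetProducer.apply(parentTypeFilter);
        }
    }

    public static void main(String[] args) {
        Function<Query, BitSetProducer> factory = query -> new BitSetProducer() {};
        NestedMapperSketch mapper = new NestedMapperSketch(new Query("non-nested docs"), factory);
        System.out.println(mapper.parentBitSets() != null); // true
    }
}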
@@ -156,7 +164,8 @@ protected Parameter[] getParameters() { private boolean isDefault() { Mode m = mode.get(); - if (m != null && (((indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) { + if (m != null + && (((indexMode != null && indexMode.isSyntheticSourceEnabled() && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) { return false; } return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); @@ -165,15 +174,19 @@ private boolean isDefault() { @Override public SourceFieldMapper build() { if (enabled.getValue().explicit()) { - if (indexMode == IndexMode.TIME_SERIES) { - throw new MapperParsingException("Time series indices only support synthetic source"); + if (indexMode != null && indexMode.isSyntheticSourceEnabled()) { + throw new MapperParsingException("Indices with index mode [" + indexMode + "] only support synthetic source"); } if (mode.get() != null) { throw new MapperParsingException("Cannot set both [mode] and [enabled] parameters"); } } if (isDefault()) { - return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : DEFAULT; + return switch (indexMode) { + case TIME_SERIES -> TSDB_DEFAULT; + case LOGS -> LOGS_DEFAULT; + default -> DEFAULT; + }; } if (supportsNonDefaultParameterValues == false) { List disallowed = new ArrayList<>(); @@ -212,10 +225,21 @@ public SourceFieldMapper build() { } - public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, + public static final TypeParser PARSER = new ConfigurableTypeParser(c -> { + var indexMode = c.getIndexSettings().getMode(); + if (indexMode.isSyntheticSourceEnabled()) { + if (indexMode == IndexMode.TIME_SERIES) { + if (c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0)) { + return TSDB_DEFAULT; + } else { + return TSDB_LEGACY_DEFAULT; + } + } else if (indexMode == IndexMode.LOGS) { + return LOGS_DEFAULT; + } + } + return DEFAULT; + }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), @@ -323,6 +347,9 @@ public void preParse(DocumentParserContext context) throws IOException { final BytesReference adaptedSource = applyFilters(originalSource, contentType); if (adaptedSource != null) { + assert context.indexSettings().getIndexVersionCreated().before(IndexVersions.V_8_7_0) + || indexMode == null + || indexMode.isSyntheticSourceEnabled() == false; final BytesRef ref = adaptedSource.toBytesRef(); context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); } @@ -363,7 +390,7 @@ public FieldMapper.Builder getMergeBuilder() { */ public SourceLoader newSourceLoader(Mapping mapping, SourceFieldMetrics metrics) { if (mode == Mode.SYNTHETIC) { - return new SourceLoader.Synthetic(mapping, metrics); + return new SourceLoader.Synthetic(mapping::syntheticFieldLoader, metrics); } return SourceLoader.FROM_STORED_SOURCE; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java index a1b95e7a2c8b0..c9bea33852a20 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java @@ -57,6 +57,14 @@ interface Leaf { * @param docId the doc to load */ Source source(LeafStoredFieldLoader storedFields, int
docId) throws IOException; + + /** + * Write the {@code _source} for a document in the provided {@link XContentBuilder}. + * @param storedFields a loader for stored fields + * @param docId the doc to load + * @param b the builder to write the xcontent + */ + void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder b) throws IOException; } /** @@ -70,7 +78,18 @@ public boolean reordersFieldValues() { @Override public Leaf leaf(LeafReader reader, int[] docIdsInLeaf) { - return (storedFieldLoader, docId) -> Source.fromBytes(storedFieldLoader.source()); + return new Leaf() { + @Override + public Source source(LeafStoredFieldLoader storedFields, int docId) throws IOException { + return Source.fromBytes(storedFields.source()); + } + + @Override + public void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder builder) throws IOException { + Source source = source(storedFields, docId); + builder.rawValue(source.internalSourceRef().streamInput(), source.sourceContentType()); + } + }; } @Override @@ -80,15 +99,20 @@ public Set requiredStoredFields() { }; /** - * Load {@code _source} from doc values. + * Reconstructs {@code _source} from doc values and stored fields. */ class Synthetic implements SourceLoader { private final Supplier syntheticFieldLoaderLeafSupplier; private final Set requiredStoredFields; private final SourceFieldMetrics metrics; - public Synthetic(Mapping mapping, SourceFieldMetrics metrics) { - this.syntheticFieldLoaderLeafSupplier = mapping::syntheticFieldLoader; + /** + * Creates a {@link SourceLoader} to reconstruct {@code _source} from doc values and stored fields. + * @param fieldLoaderSupplier A supplier to create {@link SyntheticFieldLoader}, one for each leaf. + * @param metrics Metrics for profiling. + */ + public Synthetic(Supplier fieldLoaderSupplier, SourceFieldMetrics metrics) { + this.syntheticFieldLoaderLeafSupplier = fieldLoaderSupplier; this.requiredStoredFields = syntheticFieldLoaderLeafSupplier.get() .storedFieldLoaders() .map(Map.Entry::getKey) @@ -126,6 +150,16 @@ public Source source(LeafStoredFieldLoader storedFields, int docId) throws IOExc return source; } + + @Override + public void write(LeafStoredFieldLoader storedFields, int docId, XContentBuilder b) throws IOException { + long startTime = metrics.getRelativeTimeSupplier().getAsLong(); + + leaf.write(storedFields, docId, b); + + TimeValue duration = TimeValue.timeValueMillis(metrics.getRelativeTimeSupplier().getAsLong() - startTime); + metrics.recordSyntheticSourceLoadLatency(duration); + } } private static class SyntheticLeaf implements Leaf { @@ -143,6 +177,14 @@ private SyntheticLeaf(SyntheticFieldLoader loader, SyntheticFieldLoader.DocValue @Override public Source source(LeafStoredFieldLoader storedFieldLoader, int docId) throws IOException { + try (XContentBuilder b = new XContentBuilder(JsonXContent.jsonXContent, new ByteArrayOutputStream())) { + write(storedFieldLoader, docId, b); + return Source.fromBytes(BytesReference.bytes(b), b.contentType()); + } + } + + @Override + public void write(LeafStoredFieldLoader storedFieldLoader, int docId, XContentBuilder b) throws IOException { // Maps the names of existing objects to lists of ignored fields they contain.
Map> objectsWithIgnoredFields = null; @@ -168,13 +210,10 @@ public Source source(LeafStoredFieldLoader storedFieldLoader, int docId) throws docValuesLoader.advanceToDoc(docId); } // TODO accept a requested xcontent type - try (XContentBuilder b = new XContentBuilder(JsonXContent.jsonXContent, new ByteArrayOutputStream())) { - if (loader.hasValue()) { - loader.write(b); - } else { - b.startObject().endObject(); - } - return Source.fromBytes(BytesReference.bytes(b), b.contentType()); + if (loader.hasValue()) { + loader.write(b); + } else { + b.startObject().endObject(); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index b47f6fd80a77e..c00f360e94c7e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -467,6 +467,28 @@ int getNumBytes(int dimensions) { ByteBuffer createByteBuffer(IndexVersion indexVersion, int numBytes) { return ByteBuffer.wrap(new byte[numBytes]); } + + @Override + int parseDimensionCount(DocumentParserContext context) throws IOException { + XContentParser.Token currentToken = context.parser().currentToken(); + return switch (currentToken) { + case START_ARRAY -> { + int index = 0; + for (Token token = context.parser().nextToken(); token != Token.END_ARRAY; token = context.parser().nextToken()) { + index++; + } + yield index; + } + case VALUE_STRING -> { + byte[] decodedVector = HexFormat.of().parseHex(context.parser().text()); + yield decodedVector.length; + } + default -> throw new ParsingException( + context.parser().getTokenLocation(), + format("Unsupported type [%s] for provided value [%s]", currentToken, context.parser().text()) + ); + }; + } }, FLOAT(4) { diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index 5042ab358a96c..e64a424e86052 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.support.QueryParsers; @@ -209,6 +210,20 @@ protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throw @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { + final int maxAllowedRegexLength = context.getIndexSettings().getMaxRegexLength(); + if (value.length() > maxAllowedRegexLength) { + throw new IllegalArgumentException( + "The length of prefix [" + + value.length() + + "] used in the Prefix Query request has exceeded " + + "the allowed maximum of [" + + maxAllowedRegexLength + + "]. " + + "This maximum can be set by changing the [" + + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + + "] index level setting." 
+ ); MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); MappedFieldType fieldType = context.getFieldType(fieldName); diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 7ca0b0bd401ea..9d3aa9905c744 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -436,7 +436,7 @@ public boolean isSourceSynthetic() { */ public SourceLoader newSourceLoader(boolean forceSyntheticSource) { if (forceSyntheticSource) { - return new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()); + return new SourceLoader.Synthetic(mappingLookup.getMapping()::syntheticFieldLoader, mapperMetrics.sourceFieldMetrics()); } return mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); } diff --git a/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java b/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java index 5ba2196e91488..f3461aba13d92 100644 --- a/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java +++ b/server/src/main/java/org/elasticsearch/inference/ChunkedInferenceServiceResults.java @@ -8,6 +8,26 @@ package org.elasticsearch.inference; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContent; + +import java.util.Iterator; + public interface ChunkedInferenceServiceResults extends InferenceServiceResults { + /** + * Implementations of this function serialize their embeddings to {@link BytesReference} for storage in semantic text fields. + * The iterator iterates over all the chunks stored in the {@link ChunkedInferenceServiceResults}. + * + * @param xcontent provided by the SemanticTextField + * @return an iterator of the serialized {@link Chunk} which includes the matched text (input) and bytes reference (output/embedding). + */ + Iterator chunksAsMatchedTextAndByteReference(XContent xcontent); + + /** + * A chunk of inference results containing matched text and the bytes reference. + * @param matchedText + * @param bytesReference + */ + record Chunk(String matchedText, BytesReference bytesReference) {} } diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 64fcc2f8ff684..130850640cf3c 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -469,8 +469,13 @@ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup * nr_bursts \d+ * burst_time * - * These additional fields are currently ignored. * + * When {@code schedstat_enabled} is set, an additional statistic, {@code wait_sum}, is also available: + * <pre>
+     * wait_sum \d+
+     * </pre>
+ * {@code wait_sum} represents contention between task groups; it is simply the sum of the wait times of the group's cfs_rq. + * These three additional fields are currently ignored. * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code cpu} subsystem * @return the lines from {@code cpu.stat} * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group @@ -478,7 +483,7 @@ private OsStats.Cgroup.CpuStat getCgroupCpuAcctCpuStat(final String controlGroup @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu") List readSysFsCgroupCpuAcctCpuStat(final String controlGroup) throws IOException { final List lines = Files.readAllLines(PathUtils.get("/sys/fs/cgroup/cpu", controlGroup, "cpu.stat")); - assert lines != null && (lines.size() == 3 || lines.size() == 5); + assert lines != null && (lines.size() >= 3); return lines; } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index e0ca0f7a48cdd..fd2aabce8e952 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1044,6 +1044,7 @@ record PluginServiceInstances( threadPool, scriptService, bigArrays, + searchModule.getRankFeatureShardPhase(), searchModule.getFetchPhase(), responseCollectorService, circuitBreakerService, diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index ab90ca42bca98..914dd51d0c6b2 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -33,6 +33,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.fetch.FetchPhase; +import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; @@ -116,6 +117,7 @@ SearchService newSearchService( ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, + RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -128,6 +130,7 @@ SearchService newSearchService( threadPool, scriptService, bigArrays, + rankFeatureShardPhase, fetchPhase, responseCollectorService, circuitBreakerService, diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index da12c97281de1..0e404ca03707f 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -8,6 +8,8 @@ package org.elasticsearch.plugins.internal; +import org.elasticsearch.index.IndexMode; + /** * An interface to provide instances of document parsing observer and reporter */ @@ -32,7 +34,11 @@ default DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPa /** * @return an instance of a reporter to use when parsing has been completed and indexing successful */ - default DocumentSizeReporter newDocumentSizeReporter(String indexName, DocumentSizeAccumulator
documentSizeAccumulator) { + default DocumentSizeReporter newDocumentSizeReporter( + String indexName, + IndexMode indexMode, + DocumentSizeAccumulator documentSizeAccumulator + ) { return DocumentSizeReporter.EMPTY_INSTANCE; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 664f9b63dee2a..d9a34fe36c860 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -324,13 +324,13 @@ Table buildTable(RestRequest request, ClusterStateResponse state, IndicesStatsRe table.addCell(commitStats == null ? null : commitStats.getUserData().get(Engine.SYNC_COMMIT_ID)); if (shard.unassignedInfo() != null) { - table.addCell(shard.unassignedInfo().getReason()); - Instant unassignedTime = Instant.ofEpochMilli(shard.unassignedInfo().getUnassignedTimeInMillis()); + table.addCell(shard.unassignedInfo().reason()); + Instant unassignedTime = Instant.ofEpochMilli(shard.unassignedInfo().unassignedTimeMillis()); table.addCell(UnassignedInfo.DATE_TIME_FORMATTER.format(unassignedTime)); table.addCell( - TimeValue.timeValueMillis(Math.max(0, System.currentTimeMillis() - shard.unassignedInfo().getUnassignedTimeInMillis())) + TimeValue.timeValueMillis(Math.max(0, System.currentTimeMillis() - shard.unassignedInfo().unassignedTimeMillis())) ); - table.addCell(shard.unassignedInfo().getDetails()); + table.addCell(shard.unassignedInfo().details()); } else { table.addCell(null); table.addCell(null); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 9bacf19a9169d..4f16d3a5720fb 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -70,6 +70,7 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.sort.SortAndFormats; @@ -102,6 +103,7 @@ final class DefaultSearchContext extends SearchContext { private final ContextIndexSearcher searcher; private DfsSearchResult dfsResult; private QuerySearchResult queryResult; + private RankFeatureResult rankFeatureResult; private FetchSearchResult fetchResult; private final float queryBoost; private final boolean lowLevelCancellation; @@ -308,6 +310,17 @@ static boolean isParallelCollectionSupportedForResults( return false; } + @Override + public void addRankFeatureResult() { + this.rankFeatureResult = new RankFeatureResult(this.readerContext.id(), this.shardTarget, this.request); + addReleasable(rankFeatureResult::decRef); + } + + @Override + public RankFeatureResult rankFeatureResult() { + return rankFeatureResult; + } + @Override public void addFetchResult() { this.fetchResult = new FetchSearchResult(this.readerContext.id(), this.shardTarget); diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 8d5fa0a7ac155..ef80ce706704a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ 
b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -226,6 +226,9 @@ import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.retriever.KnnRetrieverBuilder; @@ -331,6 +334,7 @@ public SearchModule(Settings settings, List plugins, TelemetryProv registerRetrieverParsers(plugins); registerQueryParsers(plugins); registerRescorers(plugins); + registerRankers(); registerSorts(); registerValueFormats(); registerSignificanceHeuristics(plugins); @@ -827,6 +831,10 @@ private void registerRescorer(RescorerSpec spec) { namedWriteables.add(new NamedWriteableRegistry.Entry(RescorerBuilder.class, spec.getName().getPreferredName(), spec.getReader())); } + private void registerRankers() { + namedWriteables.add(new NamedWriteableRegistry.Entry(RankDoc.class, RankFeatureDoc.NAME, RankFeatureDoc::new)); + } + private void registerSorts() { namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, GeoDistanceSortBuilder.NAME, GeoDistanceSortBuilder::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, ScoreSortBuilder.NAME, ScoreSortBuilder::new)); @@ -1252,6 +1260,10 @@ private void registerQuery(QuerySpec spec) { ); } + public RankFeatureShardPhase getRankFeatureShardPhase() { + return new RankFeatureShardPhase(); + } + public FetchPhase getFetchPhase() { return new FetchPhase(fetchSubPhases); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java index 254cd7d3370b5..450b98b22f39c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java @@ -15,6 +15,7 @@ import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.transport.TransportResponse; import java.io.IOException; @@ -43,6 +44,14 @@ protected SearchPhaseResult(StreamInput in) throws IOException { super(in); } + /** + * Specifies whether the specific search phase results are associated with an opened SearchContext on the shards that + * executed the request. + */ + public boolean hasSearchContext() { + return false; + } + /** * Returns the search context ID that is used to reference the search context on the executing node * or null if no context was created. 
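The SearchPhaseResult additions above follow the class's existing convention: the base type answers null for every phase-specific accessor, and each concrete phase result overrides only its own, so callers can probe a result without casting. A compressed sketch of that shape with illustrative class names:

abstract class PhaseResultSketch {
    QueryResultSketch queryResult() {
        return null;
    }

    RankFeatureResultSketch rankFeatureResult() {
        return null; // the accessor added by this change follows the same pattern
    }

    FetchResultSketch fetchResult() {
        return null;
    }
}

class QueryResultSketch extends PhaseResultSketch {
    @Override
    QueryResultSketch queryResult() {
        return this;
    }
}

class RankFeatureResultSketch extends PhaseResultSketch {
    @Override
    RankFeatureResultSketch rankFeatureResult() {
        return this;
    }
}

class FetchResultSketch extends PhaseResultSketch {
    @Override
    FetchResultSketch fetchResult() {
        return this;
    }
}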
@@ -81,6 +90,13 @@ public QuerySearchResult queryResult() { return null; } + /** + * Returns the rank feature result iff it's included in this response otherwise null + */ + public RankFeatureResult rankFeatureResult() { + return null; + } + /** * Returns the fetch result iff it's included in this response otherwise null */ diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 41796967c3870..b45a2e2e2ca14 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -112,6 +112,9 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; +import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -151,6 +154,7 @@ import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.core.TimeValue.timeValueMinutes; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.elasticsearch.search.rank.feature.RankFeatureShardPhase.EMPTY_RESULT; public class SearchService extends AbstractLifecycleComponent implements IndexEventListener { private static final Logger logger = LogManager.getLogger(SearchService.class); @@ -276,6 +280,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final DfsPhase dfsPhase = new DfsPhase(); private final FetchPhase fetchPhase; + private final RankFeatureShardPhase rankFeatureShardPhase; private volatile boolean enableSearchWorkerThreads; private volatile boolean enableQueryPhaseParallelCollection; @@ -314,6 +319,7 @@ public SearchService( ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, + RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -327,6 +333,7 @@ public SearchService( this.scriptService = scriptService; this.responseCollectorService = responseCollectorService; this.bigArrays = bigArrays; + this.rankFeatureShardPhase = rankFeatureShardPhase; this.fetchPhase = fetchPhase; this.multiBucketConsumerService = new MultiBucketConsumerService( clusterService, @@ -713,12 +720,38 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh } } + public void executeRankFeaturePhase(RankFeatureShardRequest request, SearchShardTask task, ActionListener listener) { + final ReaderContext readerContext = findReaderContext(request.contextId(), request); + final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest()); + final Releasable markAsUsed = readerContext.markAsUsed(getKeepAlive(shardSearchRequest)); + runAsync(getExecutor(readerContext.indexShard()), () -> { + try (SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, ResultsType.RANK_FEATURE, false)) { + int[] docIds = request.getDocIds(); + if (docIds == null || docIds.length == 0) { + 
searchContext.rankFeatureResult().shardResult(EMPTY_RESULT); + searchContext.rankFeatureResult().incRef(); + return searchContext.rankFeatureResult(); + } + rankFeatureShardPhase.prepareForFetch(searchContext, request); + fetchPhase.execute(searchContext, docIds, null); + rankFeatureShardPhase.processFetch(searchContext); + var rankFeatureResult = searchContext.rankFeatureResult(); + rankFeatureResult.incRef(); + return rankFeatureResult; + } catch (Exception e) { + assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); + // we handle the failure in the failure listener below + throw e; + } + }, wrapFailureListener(listener, readerContext, markAsUsed)); + } + private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchContext context, long afterQueryTime) { try ( Releasable scope = tracer.withScope(context.getTask()); SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime) ) { - fetchPhase.execute(context, shortcutDocIdsToLoad(context)); + fetchPhase.execute(context, shortcutDocIdsToLoad(context), null); if (reader.singleSession()) { freeReaderContext(reader.id()); } @@ -871,7 +904,7 @@ public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, A try ( SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext, true, System.nanoTime()) ) { - fetchPhase.execute(searchContext, request.docIds()); + fetchPhase.execute(searchContext, request.docIds(), request.getRankDocks()); if (readerContext.singleSession()) { freeReaderContext(request.contextId()); } @@ -1559,6 +1592,12 @@ void addResultsObject(SearchContext context) { context.addQueryResult(); } }, + RANK_FEATURE { + @Override + void addResultsObject(SearchContext context) { + context.addRankFeatureResult(); + } + }, FETCH { @Override void addResultsObject(SearchContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 92fb09b017b2c..a3cf20d0b9b72 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -225,7 +225,7 @@ public SearchExecutionContext getSearchExecutionContext() { return searchExecutionContext; } }; - fetchSubSearchContext.fetchPhase().execute(fetchSubSearchContext, docIdsToLoad); + fetchSubSearchContext.fetchPhase().execute(fetchSubSearchContext, docIdsToLoad, null); return fetchSubSearchContext.fetchResult(); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index 911c66f2fd533..65d49f771a045 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rescore.RescoreContext; import java.util.Collections; @@ -155,6 +156,19 @@ public List rescore() { return searchContext.rescore(); } + /** + * The rank builder used in the original search + */ + public RankBuilder rankBuilder() { + return 
searchContext.request().source() == null ? null : searchContext.request().source().rankBuilder(); + } + + public List queryNames() { + return searchContext.request().source() == null + ? Collections.emptyList() + : searchContext.request().source().subSearches().stream().map(x -> x.getQueryBuilder().queryName()).toList(); + } + /** * Should the response include sequence number and primary term metadata */ diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 0c54e8ff89589..db5617b543577 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -31,6 +31,8 @@ import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.profile.Timer; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rank.RankDocShardInfo; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentType; @@ -56,7 +58,7 @@ public FetchPhase(List fetchSubPhases) { this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this); } - public void execute(SearchContext context, int[] docIdsToLoad) { + public void execute(SearchContext context, int[] docIdsToLoad, RankDocShardInfo rankDocs) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("{}", new SearchContextSourcePrinter(context)); } @@ -75,7 +77,7 @@ public void execute(SearchContext context, int[] docIdsToLoad) { Profiler profiler = context.getProfilers() == null ? Profiler.NOOP : Profilers.startProfilingFetchPhase(); SearchHits hits = null; try { - hits = buildSearchHits(context, docIdsToLoad, profiler); + hits = buildSearchHits(context, docIdsToLoad, profiler, rankDocs); } finally { // Always finish profiling ProfileResult profileResult = profiler.finish(); @@ -97,7 +99,7 @@ public Source getSource(LeafReaderContext ctx, int doc) { } } - private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Profiler profiler) { + private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Profiler profiler, RankDocShardInfo rankDocs) { FetchContext fetchContext = new FetchContext(context); SourceLoader sourceLoader = context.newSourceLoader(); @@ -165,7 +167,8 @@ protected SearchHit nextDoc(int doc) throws IOException { doc, ctx, leafSourceLoader, - leafIdLoader + leafIdLoader, + rankDocs == null ? 
null : rankDocs.get(doc) ); boolean success = false; try { @@ -222,7 +225,8 @@ private static HitContext prepareHitContext( int docId, LeafReaderContext subReaderContext, SourceLoader.Leaf sourceLoader, - IdLoader.Leaf idLoader + IdLoader.Leaf idLoader, + RankDoc rankDoc ) throws IOException { if (nestedDocuments.advance(docId - subReaderContext.docBase) == null) { return prepareNonNestedHitContext( @@ -232,7 +236,8 @@ private static HitContext prepareHitContext( docId, subReaderContext, sourceLoader, - idLoader + idLoader, + rankDoc ); } else { return prepareNestedHitContext( @@ -242,7 +247,8 @@ private static HitContext prepareHitContext( docId, nestedDocuments, subReaderContext, - leafStoredFieldLoader + leafStoredFieldLoader, + rankDoc ); } } @@ -261,7 +267,8 @@ private static HitContext prepareNonNestedHitContext( int docId, LeafReaderContext subReaderContext, SourceLoader.Leaf sourceLoader, - IdLoader.Leaf idLoader + IdLoader.Leaf idLoader, + RankDoc rankDoc ) throws IOException { int subDocId = docId - subReaderContext.docBase; @@ -272,7 +279,7 @@ private static HitContext prepareNonNestedHitContext( SearchHit hit = new SearchHit(docId); // TODO: can we use real pooled buffers here as well? Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); - return new HitContext(hit, subReaderContext, subDocId, Map.of(), source); + return new HitContext(hit, subReaderContext, subDocId, Map.of(), source, rankDoc); } else { SearchHit hit = new SearchHit(docId, id); Source source; @@ -288,7 +295,7 @@ private static HitContext prepareNonNestedHitContext( } else { source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); } - return new HitContext(hit, subReaderContext, subDocId, leafStoredFieldLoader.storedFields(), source); + return new HitContext(hit, subReaderContext, subDocId, leafStoredFieldLoader.storedFields(), source, rankDoc); } } @@ -320,7 +327,8 @@ private static HitContext prepareNestedHitContext( int topDocId, LeafNestedDocuments nestedInfo, LeafReaderContext subReaderContext, - LeafStoredFieldLoader childFieldLoader + LeafStoredFieldLoader childFieldLoader, + RankDoc rankDoc ) throws IOException { String rootId; @@ -352,7 +360,7 @@ private static HitContext prepareNestedHitContext( Source nestedSource = nestedIdentity.extractSource(rootSource); SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity); - return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource); + return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource, rankDoc); } interface Profiler { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index a6f41f8b7fed3..d6de6d46462e4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -11,8 +11,10 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.rank.RankDoc; import java.io.IOException; import java.util.List; @@ -29,13 +31,22 @@ class HitContext { private final int docId; private final Source source; private final Map> loadedFields; + private final 
RankDoc rankDoc; - public HitContext(SearchHit hit, LeafReaderContext context, int docId, Map> loadedFields, Source source) { + public HitContext( + SearchHit hit, + LeafReaderContext context, + int docId, + Map> loadedFields, + Source source, + RankDoc rankDoc + ) { this.hit = hit; this.readerContext = context; this.docId = docId; this.source = source; this.loadedFields = loadedFields; + this.rankDoc = rankDoc; } public SearchHit hit() { @@ -72,6 +83,11 @@ public Map> loadedFields() { return loadedFields; } + @Nullable + public RankDoc rankDoc() { + return this.rankDoc; + } + public IndexReader topLevelReader() { return ReaderUtil.getTopLevelContext(readerContext).reader(); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java index 86f6db0b681d7..8128f48dda013 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java @@ -19,6 +19,7 @@ import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.rank.RankDocShardInfo; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportRequest; @@ -118,4 +119,9 @@ public RescoreDocIds getRescoreDocIds() { public AggregatedDfs getAggregatedDfs() { return null; } + + @Nullable + public RankDocShardInfo getRankDocks() { + return null; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java index a0f960dc4aaad..0415ecc4a6498 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.fetch; import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; @@ -18,6 +19,7 @@ import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.search.rank.RankDocShardInfo; import java.io.IOException; import java.util.List; @@ -32,12 +34,14 @@ public class ShardFetchSearchRequest extends ShardFetchRequest implements Indice private final ShardSearchRequest shardSearchRequest; private final RescoreDocIds rescoreDocIds; private final AggregatedDfs aggregatedDfs; + private final RankDocShardInfo rankDocs; public ShardFetchSearchRequest( OriginalIndices originalIndices, ShardSearchContextId id, ShardSearchRequest shardSearchRequest, List docIds, + RankDocShardInfo rankDocs, ScoreDoc lastEmittedDoc, RescoreDocIds rescoreDocIds, AggregatedDfs aggregatedDfs @@ -47,6 +51,7 @@ public ShardFetchSearchRequest( this.shardSearchRequest = shardSearchRequest; this.rescoreDocIds = rescoreDocIds; this.aggregatedDfs = aggregatedDfs; + this.rankDocs = rankDocs; } public ShardFetchSearchRequest(StreamInput in) throws IOException { @@ -55,6 +60,11 @@ public ShardFetchSearchRequest(StreamInput in) throws IOException { shardSearchRequest = 
in.readOptionalWriteable(ShardSearchRequest::new); rescoreDocIds = new RescoreDocIds(in); aggregatedDfs = in.readOptionalWriteable(AggregatedDfs::new); + if (in.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_IN_SHARD_FETCH_REQUEST)) { + this.rankDocs = in.readOptionalWriteable(RankDocShardInfo::new); + } else { + this.rankDocs = null; + } } @Override @@ -64,6 +74,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(shardSearchRequest); rescoreDocIds.writeTo(out); out.writeOptionalWriteable(aggregatedDfs); + if (out.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_IN_SHARD_FETCH_REQUEST)) { + out.writeOptionalWriteable(rankDocs); + } } @Override @@ -96,4 +109,9 @@ public RescoreDocIds getRescoreDocIds() { public AggregatedDfs getAggregatedDfs() { return aggregatedDfs; } + + @Override + public RankDocShardInfo getRankDocks() { + return this.rankDocs; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index 0873ca777d428..16f07b2ab9880 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -16,6 +16,7 @@ import org.elasticsearch.search.rescore.RescoreContext; import java.io.IOException; +import java.util.List; /** * Explains the scoring calculations for the top hits. @@ -27,6 +28,9 @@ public FetchSubPhaseProcessor getProcessor(FetchContext context) { return null; } return new FetchSubPhaseProcessor() { + + private final List queryNames = context.queryNames(); + @Override public void setNextReader(LeafReaderContext readerContext) { @@ -40,6 +44,9 @@ public void process(HitContext hitContext) throws IOException { for (RescoreContext rescore : context.rescore()) { explanation = rescore.rescorer().explain(topLevelDocId, context.searcher(), rescore, explanation); } + if (context.rankBuilder() != null) { + explanation = context.rankBuilder().explainHit(explanation, hitContext.rankDoc(), queryNames); + } // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index a4ba982e1dd73..61e3b15d530f7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -92,7 +92,7 @@ private void hitExecute(Map innerHi innerHitsContext.setRootId(hit.getId()); innerHitsContext.setRootLookup(rootSource); - fetchPhase.execute(innerHitsContext, docIdsToLoad); + fetchPhase.execute(innerHitsContext, docIdsToLoad, null); FetchSearchResult fetchResult = innerHitsContext.fetchResult(); SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); for (int j = 0; j < internalHits.length; j++) { diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index d5c3c00c00ce1..e32397e25d773 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.profile.Profilers; 
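The ExplainPhase hunk above is the consumer of the new RankBuilder#explainHit extension point: when a rank builder is present, the per-hit Lucene explanation is wrapped with global-rank information. A hypothetical implementation, sketched only to show the intended shape (no concrete builder in this change necessarily does exactly this):

@Override
public Explanation explainHit(Explanation baseExplanation, RankDoc rankDoc, List<String> queryNames) {
    if (rankDoc == null) {
        return baseExplanation; // this hit never made it into the ranked window
    }
    // surface the final rank and score, keeping the per-query explanation as a detail
    return Explanation.match(rankDoc.score, "reranked to position [" + rankDoc.rank + "], original explanation:", baseExplanation);
}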
import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -374,6 +375,16 @@ public float getMaxScore() { return in.getMaxScore(); } + @Override + public void addRankFeatureResult() { + in.addRankFeatureResult(); + } + + @Override + public RankFeatureResult rankFeatureResult() { + return in.rankFeatureResult(); + } + @Override public FetchSearchResult fetchResult() { return in.fetchResult(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 35f96ee2dc102..9bc622034184c 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -42,6 +42,7 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -332,6 +333,10 @@ public Query rewrittenQuery() { public abstract float getMaxScore(); + public abstract void addRankFeatureResult(); + + public abstract RankFeatureResult rankFeatureResult(); + public abstract FetchPhase fetchPhase(); public abstract FetchSearchResult fetchResult(); diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java index 8a180d4f11ec7..a8c898409bf9a 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java @@ -48,6 +48,6 @@ static SourceProvider fromStoredFields() { * multiple threads. 
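A note on the SearchContext changes above: because addRankFeatureResult() and rankFeatureResult() are abstract, every context implementation has to pick a strategy. The diff uses three, shown here in miniature with illustrative stand-in classes (these are not ES types): DefaultSearchContext owns the state, FilteredSearchContext forwards to the wrapped context, and RankSearchContext (later in the diff) fails fast.

abstract class Ctx {
    abstract Object rankFeatureResult();
}

final class Owning extends Ctx {      // cf. DefaultSearchContext: owns the state
    private Object rankFeatureResult;
    @Override
    Object rankFeatureResult() { return rankFeatureResult; }
}

final class Forwarding extends Ctx {  // cf. FilteredSearchContext: delegate everything
    private final Ctx in;
    Forwarding(Ctx in) { this.in = in; }
    @Override
    Object rankFeatureResult() { return in.rankFeatureResult(); }
}

final class Rejecting extends Ctx {   // cf. RankSearchContext: the phase does not apply here
    @Override
    Object rankFeatureResult() { throw new UnsupportedOperationException(); }
}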
*/ static SourceProvider fromSyntheticSource(Mapping mapping, SourceFieldMetrics metrics) { - return new SyntheticSourceProvider(new SourceLoader.Synthetic(mapping, metrics)); + return new SyntheticSourceProvider(new SourceLoader.Synthetic(mapping::syntheticFieldLoader, metrics)); } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 828c6d2b4f3e8..0d2610aa34282 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -87,35 +87,38 @@ static void executeRank(SearchContext searchContext) throws QueryPhaseExecutionE boolean searchTimedOut = querySearchResult.searchTimedOut(); long serviceTimeEWMA = querySearchResult.serviceTimeEWMA(); int nodeQueueSize = querySearchResult.nodeQueueSize(); - - // run each of the rank queries - for (Query rankQuery : queryPhaseRankShardContext.queries()) { - // if a search timeout occurs, exit with partial results - if (searchTimedOut) { - break; - } - try ( - RankSearchContext rankSearchContext = new RankSearchContext( - searchContext, - rankQuery, - queryPhaseRankShardContext.rankWindowSize() - ) - ) { - QueryPhase.addCollectorsAndSearch(rankSearchContext); - QuerySearchResult rrfQuerySearchResult = rankSearchContext.queryResult(); - rrfRankResults.add(rrfQuerySearchResult.topDocs().topDocs); - serviceTimeEWMA += rrfQuerySearchResult.serviceTimeEWMA(); - nodeQueueSize = Math.max(nodeQueueSize, rrfQuerySearchResult.nodeQueueSize()); - searchTimedOut = rrfQuerySearchResult.searchTimedOut(); + try { + // run each of the rank queries + for (Query rankQuery : queryPhaseRankShardContext.queries()) { + // if a search timeout occurs, exit with partial results + if (searchTimedOut) { + break; + } + try ( + RankSearchContext rankSearchContext = new RankSearchContext( + searchContext, + rankQuery, + queryPhaseRankShardContext.rankWindowSize() + ) + ) { + QueryPhase.addCollectorsAndSearch(rankSearchContext); + QuerySearchResult rrfQuerySearchResult = rankSearchContext.queryResult(); + rrfRankResults.add(rrfQuerySearchResult.topDocs().topDocs); + serviceTimeEWMA += rrfQuerySearchResult.serviceTimeEWMA(); + nodeQueueSize = Math.max(nodeQueueSize, rrfQuerySearchResult.nodeQueueSize()); + searchTimedOut = rrfQuerySearchResult.searchTimedOut(); + } } - } - querySearchResult.setRankShardResult(queryPhaseRankShardContext.combineQueryPhaseResults(rrfRankResults)); + querySearchResult.setRankShardResult(queryPhaseRankShardContext.combineQueryPhaseResults(rrfRankResults)); - // record values relevant to all queries - querySearchResult.searchTimedOut(searchTimedOut); - querySearchResult.serviceTimeEWMA(serviceTimeEWMA); - querySearchResult.nodeQueueSize(nodeQueueSize); + // record values relevant to all queries + querySearchResult.searchTimedOut(searchTimedOut); + querySearchResult.serviceTimeEWMA(serviceTimeEWMA); + querySearchResult.nodeQueueSize(nodeQueueSize); + } catch (Exception e) { + throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Failed to execute rank query", e); + } } static void executeQuery(SearchContext searchContext) throws QueryPhaseExecutionException { diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java b/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java index 7118c9f49b36d..704ead39b1ded 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/rank/RankBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.rank; +import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -16,6 +17,8 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -32,7 +35,7 @@ public abstract class RankBuilder implements VersionedNamedWriteable, ToXContent public static final ParseField RANK_WINDOW_SIZE_FIELD = new ParseField("rank_window_size"); - public static final int DEFAULT_WINDOW_SIZE = SearchService.DEFAULT_SIZE; + public static final int DEFAULT_RANK_WINDOW_SIZE = SearchService.DEFAULT_SIZE; private final int rankWindowSize; @@ -68,6 +71,19 @@ public int rankWindowSize() { return rankWindowSize; } + /** + * Specifies whether this rank builder is a compound builder or not. A compound builder is a rank builder that requires + * two or more queries to be executed in order to generate the final result. + */ + public abstract boolean isCompoundBuilder(); + + /** + * Generates an {@code Explanation} on how the final score for the provided {@code RankDoc} is computed for the given {@code RankBuilder}. + * In addition to the base explanation to enrich, we also have access to the query names that were provided in the request, + * so that we can directly associate the explanation with the user-provided queries. + */ + public abstract Explanation explainHit(Explanation baseExplanation, RankDoc scoreDoc, List<String> queryNames); + /** * Generates a context used to execute required searches during the query phase on the shard. */ @@ -78,6 +94,19 @@ public int rankWindowSize() { */ public abstract QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from); + /** + * Generates a context used to execute the rank feature phase on the shard. This is responsible for retrieving any needed + * feature data, and passing them back to the coordinator through the appropriate {@link RankShardResult}. + */ + public abstract RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext(); + + /** + * Generates a context used to perform global ranking during the rank feature phase + * on the coordinator, based on all the individual shard results. The output of this is a ranked list of {@code size} ordered results, + * which will then be passed to the fetch phase. 
+ */ + public abstract RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from); + @Override + public final boolean equals(Object obj) { if (this == obj) { diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java index bd177008dd902..50b3ddc0f370a 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java @@ -9,9 +9,9 @@ package org.elasticsearch.search.rank; import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; import java.util.Objects; @@ -21,7 +21,7 @@ * Subclasses should extend this with additional information * required for their global ranking method. */ -public abstract class RankDoc extends ScoreDoc implements Writeable { +public abstract class RankDoc extends ScoreDoc implements NamedWriteable { public static final int NO_RANK = -1; @@ -37,7 +37,7 @@ public RankDoc(int doc, float score, int shardIndex) { super(doc, score, shardIndex); } - protected RankDoc(StreamInput in) throws IOException { + public RankDoc(StreamInput in) throws IOException { super(in.readVInt(), in.readFloat(), in.readVInt()); rank = in.readVInt(); } diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankDocShardInfo.java b/server/src/main/java/org/elasticsearch/search/rank/RankDocShardInfo.java new file mode 100644 index 0000000000000..56866dba36159 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/RankDocShardInfo.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.rank; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Map; + +/** + * A {@code RankDocShardInfo} holds all the final rank documents that exist in a shard. We pass this + * to the fetch phase so that the RankBuilder has all the information it needs + * when building the final SearchHits (e.g. explain). 
+ */ +public class RankDocShardInfo implements Writeable { + + // doc-id to RankDoc mapping + private final Map rankDocs; + + public RankDocShardInfo(Map rankDocs) { + this.rankDocs = rankDocs; + } + + public RankDocShardInfo(StreamInput in) throws IOException { + rankDocs = in.readMap(StreamInput::readVInt, v -> v.readNamedWriteable(RankDoc.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(rankDocs, StreamOutput::writeVInt, StreamOutput::writeNamedWriteable); + } + + public RankDoc get(int index) { + return rankDocs.get(index); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java index 1cb5843dfc7da..7f8e99971d61b 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java @@ -43,6 +43,7 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -57,14 +58,14 @@ public class RankSearchContext extends SearchContext { private final SearchContext parent; private final Query rankQuery; - private final int windowSize; + private final int rankWindowSize; private final QuerySearchResult querySearchResult; @SuppressWarnings("this-escape") - public RankSearchContext(SearchContext parent, Query rankQuery, int windowSize) { + public RankSearchContext(SearchContext parent, Query rankQuery, int rankWindowSize) { this.parent = parent; this.rankQuery = parent.buildFilteredQuery(rankQuery); - this.windowSize = windowSize; + this.rankWindowSize = rankWindowSize; this.querySearchResult = new QuerySearchResult(parent.readerContext().id(), parent.shardTarget(), parent.request()); this.addReleasable(querySearchResult::decRef); } @@ -182,7 +183,7 @@ public int from() { @Override public int size() { - return windowSize; + return rankWindowSize; } /** @@ -492,6 +493,16 @@ public FetchPhase fetchPhase() { throw new UnsupportedOperationException(); } + @Override + public void addRankFeatureResult() { + throw new UnsupportedOperationException(); + } + + @Override + public RankFeatureResult rankFeatureResult() { + throw new UnsupportedOperationException(); + } + @Override public FetchSearchResult fetchResult() { throw new UnsupportedOperationException(); diff --git a/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java new file mode 100644 index 0000000000000..b8951a4779166 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
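RankDocShardInfo above is deliberately small: a doc-id keyed view over one shard's slice of the globally ranked documents. An assembly sketch, assuming topResultsForShard holds that shard's RankFeatureDocs (variable names are illustrative; java.util imports assumed):

Map<Integer, RankDoc> byDocId = new HashMap<>();
for (RankFeatureDoc ranked : topResultsForShard) {
    byDocId.put(ranked.doc, ranked); // ScoreDoc#doc is the shard-local doc id
}
RankDocShardInfo shardInfo = new RankDocShardInfo(byDocId);
RankDoc forHit = shardInfo.get(17); // null when doc 17 was not part of the ranked window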
+ */ + +package org.elasticsearch.search.rank.context; + +import org.apache.lucene.search.ScoreDoc; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardResult; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +import static org.elasticsearch.search.SearchService.DEFAULT_FROM; +import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; + +/** + * {@code RankFeaturePhaseRankCoordinatorContext} is a base class that runs on the coordinating node and is responsible for retrieving + * {@code rank_window_size} total results from all shards, ranking them, and then producing a final paginated response of [from, from+size] results. + */ +public abstract class RankFeaturePhaseRankCoordinatorContext { + + protected final int size; + protected final int from; + protected final int rankWindowSize; + + public RankFeaturePhaseRankCoordinatorContext(int size, int from, int rankWindowSize) { + this.size = size < 0 ? DEFAULT_SIZE : size; + this.from = from < 0 ? DEFAULT_FROM : from; + this.rankWindowSize = rankWindowSize; + } + + /** + * Computes the updated scores for a list of features (i.e. document-based data). We also pass along an ActionListener + * that should be called with the new scores, and will continue execution to the next phase. + */ + protected abstract void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener); + + /** + * This method is responsible for ranking the global results based on the provided rank feature results from each shard. + *

+ * We first start by extracting ordered feature data through a {@code List<RankFeatureDoc>} + * from the provided rankSearchResults, and then compute the updated score for each of the documents. + * Once all the scores have been computed, we sort the results, perform any pagination needed, and then complete the {@code rankListener} + * with the final array of {@link ScoreDoc} results. + * + * @param rankSearchResults a list of rank feature results from each shard + * @param rankListener a listener to handle the global ranking result + */ + public void rankGlobalResults(List<RankFeatureResult> rankSearchResults, ActionListener<RankFeatureDoc[]> rankListener) { + // extract feature data from each shard rank-feature phase result + RankFeatureDoc[] featureDocs = extractFeatureDocs(rankSearchResults); + + // generate the final `topResults` paginated results, and pass them to fetch phase through the `rankListener` + computeScores(featureDocs, rankListener.delegateFailureAndWrap((listener, scores) -> { + for (int i = 0; i < featureDocs.length; i++) { + featureDocs[i].score = scores[i]; + } + listener.onResponse(featureDocs); + })); + } + + /** + * Ranks the provided {@link RankFeatureDoc} array and paginates the results based on the {@code from} and {@code size} parameters. + */ + public RankFeatureDoc[] rankAndPaginate(RankFeatureDoc[] rankFeatureDocs) { + Arrays.sort(rankFeatureDocs, Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()); + RankFeatureDoc[] topResults = new RankFeatureDoc[Math.max(0, Math.min(size, rankFeatureDocs.length - from))]; + for (int rank = 0; rank < topResults.length; ++rank) { + topResults[rank] = rankFeatureDocs[from + rank]; + topResults[rank].rank = from + rank + 1; + } + return topResults; + } + + private RankFeatureDoc[] extractFeatureDocs(List<RankFeatureResult> rankSearchResults) { + List<RankFeatureDoc> docFeatures = new ArrayList<>(); + for (RankFeatureResult rankFeatureResult : rankSearchResults) { + RankFeatureShardResult shardResult = rankFeatureResult.shardResult(); + for (RankFeatureDoc rankFeatureDoc : shardResult.rankFeatureDocs) { + if (rankFeatureDoc.featureData != null) { + docFeatures.add(rankFeatureDoc); + } + } + } + return docFeatures.toArray(new RankFeatureDoc[0]); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankShardContext.java b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankShardContext.java new file mode 100644 index 0000000000000..5d3f30bce757a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankShardContext.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.rank.context; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.rank.RankShardResult; + +/** + * {@link RankFeaturePhaseRankShardContext} is a base class used to execute the RankFeature phase on each shard. + * In this class, we can fetch the feature data for a given set of documents and pass them back to the coordinator + * through the {@link RankShardResult}. 
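To make the coordinator contract above concrete, here is a toy subclass whose computeScores ranks documents by the length of their extracted feature text; a real implementation would typically call an inference service asynchronously instead. The float[] handed to the listener must stay index-aligned with featureDocs, as the delegating lambda in rankGlobalResults assumes:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext;
import org.elasticsearch.search.rank.feature.RankFeatureDoc;

public class FeatureLengthRankCoordinatorContext extends RankFeaturePhaseRankCoordinatorContext {

    public FeatureLengthRankCoordinatorContext(int size, int from, int rankWindowSize) {
        super(size, from, rankWindowSize);
    }

    @Override
    protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener) {
        float[] scores = new float[featureDocs.length];
        for (int i = 0; i < featureDocs.length; i++) {
            // toy scoring: longer feature text ranks higher; missing features score 0
            scores[i] = featureDocs[i].featureData == null ? 0f : featureDocs[i].featureData.length();
        }
        scoreListener.onResponse(scores);
    }
}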
+ */ +public abstract class RankFeaturePhaseRankShardContext { + + protected final String field; + + public RankFeaturePhaseRankShardContext(final String field) { + this.field = field; + } + + public String getField() { + return field; + } + + /** + * This is used to fetch the feature data for a given set of documents, using the {@link org.elasticsearch.search.fetch.FetchPhase} + * and the {@link org.elasticsearch.search.fetch.subphase.FetchFieldsPhase} subphase. + * The feature data is then stored in a {@link org.elasticsearch.search.rank.feature.RankFeatureDoc} and passed back to the coordinator. + */ + @Nullable + public abstract RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId); +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureDoc.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureDoc.java new file mode 100644 index 0000000000000..d8b4ec10410f1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureDoc.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.rank.feature; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.rank.RankDoc; + +import java.io.IOException; +import java.util.Objects; + +/** + * A {@link RankDoc} that contains field data to be used later by the reranker on the coordinator node. + */ +public class RankFeatureDoc extends RankDoc { + + public static final String NAME = "rank_feature_doc"; + + // todo: update to support more than 1 fields; and not restrict to string data + public String featureData; + + public RankFeatureDoc(int doc, float score, int shardIndex) { + super(doc, score, shardIndex); + } + + public RankFeatureDoc(StreamInput in) throws IOException { + super(in); + featureData = in.readOptionalString(); + } + + public void featureData(String featureData) { + this.featureData = featureData; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalString(featureData); + } + + @Override + protected boolean doEquals(RankDoc rd) { + RankFeatureDoc other = (RankFeatureDoc) rd; + return Objects.equals(this.featureData, other.featureData); + } + + @Override + protected int doHashCode() { + return Objects.hashCode(featureData); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureResult.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureResult.java new file mode 100644 index 0000000000000..1e16d18cda367 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureResult.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
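And a minimal shard-side counterpart: a sketch that copies each hit's fetched field value into a RankFeatureDoc. The class itself is illustrative; the SearchHit and DocumentField accessors used here are existing APIs:

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.rank.RankShardResult;
import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext;
import org.elasticsearch.search.rank.feature.RankFeatureDoc;
import org.elasticsearch.search.rank.feature.RankFeatureShardResult;

public class FieldValueRankShardContext extends RankFeaturePhaseRankShardContext {

    public FieldValueRankShardContext(String field) {
        super(field);
    }

    @Override
    public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) {
        RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length];
        for (int i = 0; i < hits.getHits().length; i++) {
            SearchHit hit = hits.getHits()[i];
            rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId);
            DocumentField featureField = hit.field(field);
            // leave featureData null when the document has no value for the feature field
            rankFeatureDocs[i].featureData(featureField == null ? null : featureField.getValue().toString());
        }
        return new RankFeatureShardResult(rankFeatureDocs);
    }
}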
+ */ + +package org.elasticsearch.search.rank.feature; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; + +import java.io.IOException; + +/** + * The result of a rank feature search phase. + * Each instance holds a {@code RankFeatureShardResult} along with the references associated with it. + */ +public class RankFeatureResult extends SearchPhaseResult { + + private RankFeatureShardResult rankShardResult; + + public RankFeatureResult() {} + + public RankFeatureResult(ShardSearchContextId id, SearchShardTarget shardTarget, ShardSearchRequest request) { + this.contextId = id; + setSearchShardTarget(shardTarget); + setShardSearchRequest(request); + } + + public RankFeatureResult(StreamInput in) throws IOException { + super(in); + contextId = new ShardSearchContextId(in); + rankShardResult = in.readOptionalWriteable(RankFeatureShardResult::new); + setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); + setSearchShardTarget(in.readOptionalWriteable(SearchShardTarget::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); + contextId.writeTo(out); + out.writeOptionalWriteable(rankShardResult); + out.writeOptionalWriteable(getShardSearchRequest()); + out.writeOptionalWriteable(getSearchShardTarget()); + } + + @Override + public RankFeatureResult rankFeatureResult() { + return this; + } + + public void shardResult(RankFeatureShardResult shardResult) { + this.rankShardResult = shardResult; + } + + public RankFeatureShardResult shardResult() { + return rankShardResult; + } + + @Override + public boolean hasSearchContext() { + return rankShardResult != null; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java new file mode 100644 index 0000000000000..727ed4e938cca --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
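One lifecycle detail worth spelling out for RankFeatureResult: DefaultSearchContext registers rankFeatureResult::decRef as a releasable when the result is created, so the context's own reference dies with the context. Code that hands the result past the context's lifetime therefore takes its own reference first, which is exactly what executeRankFeaturePhase does:

// sketch of the ref-count handshake, condensed from the SearchService hunk earlier
RankFeatureResult result = searchContext.rankFeatureResult();
result.incRef();  // caller's reference: the result now survives searchContext.close()
return result;    // the releasable registered at creation time balances the context's reference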
+ */ + +package org.elasticsearch.search.rank.feature; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.search.SearchContextSourcePrinter; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.tasks.TaskCancelledException; + +import java.util.Arrays; +import java.util.Collections; + +/** + * The {@code RankFeatureShardPhase} executes the rank feature phase on the shard, iff there is a {@code RankBuilder} that requires it. + * This phase is responsible for reading field data for a set of doc ids. To do this, it reuses the {@code FetchPhase} to read the required + * fields for all requested documents using the {@code FetchFieldsPhase} sub-phase. + */ +public final class RankFeatureShardPhase { + + private static final Logger logger = LogManager.getLogger(RankFeatureShardPhase.class); + + public static final RankFeatureShardResult EMPTY_RESULT = new RankFeatureShardResult(new RankFeatureDoc[0]); + + public RankFeatureShardPhase() {} + + public void prepareForFetch(SearchContext searchContext, RankFeatureShardRequest request) { + if (logger.isTraceEnabled()) { + logger.trace("{}", new SearchContextSourcePrinter(searchContext)); + } + + if (searchContext.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } + + RankFeaturePhaseRankShardContext rankFeaturePhaseRankShardContext = shardContext(searchContext); + if (rankFeaturePhaseRankShardContext != null) { + assert rankFeaturePhaseRankShardContext.getField() != null : "field must not be null"; + searchContext.fetchFieldsContext( + new FetchFieldsContext(Collections.singletonList(new FieldAndFormat(rankFeaturePhaseRankShardContext.getField(), null))) + ); + searchContext.storedFieldsContext(StoredFieldsContext.fromList(Collections.singletonList(StoredFieldsContext._NONE_))); + searchContext.addFetchResult(); + Arrays.sort(request.getDocIds()); + } + } + + public void processFetch(SearchContext searchContext) { + if (logger.isTraceEnabled()) { + logger.trace("{}", new SearchContextSourcePrinter(searchContext)); + } + + if (searchContext.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } + + RankFeaturePhaseRankShardContext rankFeaturePhaseRankShardContext = searchContext.request().source().rankBuilder() != null + ? searchContext.request().source().rankBuilder().buildRankFeaturePhaseShardContext() + : null; + if (rankFeaturePhaseRankShardContext != null) { + // TODO: here we populate the profile part of the fetchResult as well + // we need to see what info we want to include on the overall profiling section. This is something that is per-shard + // so most likely we will still care about the `FetchFieldsPhase` profiling info as we could potentially + // operate on `rank_window_size` instead of just `size` results, so this could be much more expensive. 
+ FetchSearchResult fetchSearchResult = searchContext.fetchResult(); + if (fetchSearchResult == null || fetchSearchResult.hits() == null) { + return; + } + // this cannot be null, as we have either already checked for it, or we would have thrown in + // FetchSearchResult#shardResult() + SearchHits hits = fetchSearchResult.hits(); + RankFeatureShardResult featureRankShardResult = (RankFeatureShardResult) rankFeaturePhaseRankShardContext + .buildRankFeatureShardResult(hits, searchContext.shardTarget().getShardId().id()); + // save the result in the search context + // we still need to add the profiling info that is available from fetch + if (featureRankShardResult != null) { + searchContext.rankFeatureResult().shardResult(featureRankShardResult); + } + } + } + + private RankFeaturePhaseRankShardContext shardContext(SearchContext searchContext) { + return searchContext.request().source() != null && searchContext.request().source().rankBuilder() != null + ? searchContext.request().source().rankBuilder().buildRankFeaturePhaseShardContext() + : null; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardRequest.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardRequest.java new file mode 100644 index 0000000000000..d487fb63a0102 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardRequest.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
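Putting RankFeatureShardPhase together with the SearchService entry point shown earlier, the shard-side sequence reduces to the following (condensed from executeRankFeaturePhase; async dispatch and error handling omitted):

try (SearchContext ctx = createContext(readerContext, shardSearchRequest, task, ResultsType.RANK_FEATURE, false)) {
    int[] docIds = request.getDocIds();
    if (docIds == null || docIds.length == 0) {
        ctx.rankFeatureResult().shardResult(RankFeatureShardPhase.EMPTY_RESULT);
    } else {
        rankFeatureShardPhase.prepareForFetch(ctx, request); // restrict fetch to the feature field, sort the doc ids
        fetchPhase.execute(ctx, docIds, null);               // reuse the fetch machinery to load field values
        rankFeatureShardPhase.processFetch(ctx);             // fold the fetched hits into a RankFeatureShardResult
    }
    ctx.rankFeatureResult().incRef();
    return ctx.rankFeatureResult();
}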
+ */ + +package org.elasticsearch.search.rank.feature; + +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchShardTask; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportRequest; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.IntStream; + +/** + * Shard-level request for extracting all the features needed by a global reranker + */ + +public class RankFeatureShardRequest extends TransportRequest implements IndicesRequest { + + private final OriginalIndices originalIndices; + private final ShardSearchRequest shardSearchRequest; + + private final ShardSearchContextId contextId; + + private final int[] docIds; + + public RankFeatureShardRequest( + OriginalIndices originalIndices, + ShardSearchContextId contextId, + ShardSearchRequest shardSearchRequest, + List<Integer> docIds + ) { + this.originalIndices = originalIndices; + this.shardSearchRequest = shardSearchRequest; + this.docIds = docIds.stream().flatMapToInt(IntStream::of).toArray(); + this.contextId = contextId; + } + + public RankFeatureShardRequest(StreamInput in) throws IOException { + super(in); + originalIndices = OriginalIndices.readOriginalIndices(in); + shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); + docIds = in.readIntArray(); + contextId = in.readOptionalWriteable(ShardSearchContextId::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + OriginalIndices.writeOriginalIndices(originalIndices, out); + out.writeOptionalWriteable(shardSearchRequest); + out.writeIntArray(docIds); + out.writeOptionalWriteable(contextId); + } + + @Override + public String[] indices() { + if (originalIndices == null) { + return null; + } + return originalIndices.indices(); + } + + @Override + public IndicesOptions indicesOptions() { + if (originalIndices == null) { + return null; + } + return originalIndices.indicesOptions(); + } + + public ShardSearchRequest getShardSearchRequest() { + return shardSearchRequest; + } + + public int[] getDocIds() { + return docIds; + } + + public ShardSearchContextId contextId() { + return contextId; + } + + @Override + public SearchShardTask createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { + return new SearchShardTask(id, type, action, getDescription(), parentTaskId, headers); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java new file mode 100644 index 0000000000000..e06b963621c60 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardResult.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
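For completeness, constructing the per-shard request on the coordinator looks roughly like this; the grouping of globally ranked docs by shard happens upstream, and the variable values here are illustrative:

List<Integer> docIdsForShard = List.of(3, 17, 42);   // shard-local doc ids for this shard
RankFeatureShardRequest request = new RankFeatureShardRequest(
    originalIndices,     // OriginalIndices of the outer search request
    readerContextId,     // ShardSearchContextId kept open since the query phase
    shardSearchRequest,  // the rewritten per-shard search request
    docIdsForShard
);
// the constructor flattens the boxed list into the int[] that goes over the wire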
+ */ + +package org.elasticsearch.search.rank.feature; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.rank.RankShardResult; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +/** + * The result set of {@link RankFeatureDoc} docs for the shard. + */ +public class RankFeatureShardResult implements RankShardResult { + + public final RankFeatureDoc[] rankFeatureDocs; + + public RankFeatureShardResult(RankFeatureDoc[] rankFeatureDocs) { + this.rankFeatureDocs = Objects.requireNonNull(rankFeatureDocs); + } + + public RankFeatureShardResult(StreamInput in) throws IOException { + rankFeatureDocs = in.readArray(RankFeatureDoc::new, RankFeatureDoc[]::new); + } + + @Override + public String getWriteableName() { + return "rank_feature_shard"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.RANK_FEATURE_PHASE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray(rankFeatureDocs); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RankFeatureShardResult that = (RankFeatureShardResult) o; + return Arrays.equals(rankFeatureDocs, that.rankFeatureDocs); + } + + @Override + public int hashCode() { + return 31 * Arrays.hashCode(rankFeatureDocs); + } + + @Override + public String toString() { + return this.getClass().getSimpleName() + "{rankFeatureDocs=" + Arrays.toString(rankFeatureDocs) + '}'; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java index 3c4355e56d21d..b369324b3ee52 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/KnnRetrieverBuilder.java @@ -133,6 +133,9 @@ public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder if (preFilterQueryBuilders != null) { knnSearchBuilder.addFilterQueries(preFilterQueryBuilders); } + if (retrieverName != null) { + knnSearchBuilder.queryName(retrieverName); + } List knnSearchBuilders = new ArrayList<>(searchSourceBuilder.knnSearch()); knnSearchBuilders.add(knnSearchBuilder); searchSourceBuilder.knnSearch(knnSearchBuilders); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index c9b12f03beb53..6e3d2a58dbd5d 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -48,6 +48,8 @@ public abstract class RetrieverBuilder implements ToXContent { public static final ParseField PRE_FILTER_FIELD = new ParseField("filter"); + public static final ParseField NAME_FIELD = new ParseField("_name"); + protected static void declareBaseParserFields( String name, AbstractObjectParser parser @@ -57,6 +59,11 @@ protected static void declareBaseParserFields( c.trackSectionUsage(name + ":" + PRE_FILTER_FIELD.getPreferredName()); return preFilterQueryBuilder; }, PRE_FILTER_FIELD); + parser.declareString(RetrieverBuilder::retrieverName, NAME_FIELD); + } + + private void 
retrieverName(String retrieverName) { + this.retrieverName = retrieverName; } /** @@ -172,6 +179,8 @@ protected static RetrieverBuilder parseInnerRetrieverBuilder(XContentParser pars protected List preFilterQueryBuilders = new ArrayList<>(); + protected String retrieverName; + /** * Gets the filters for this retriever. */ diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 74b8a3e12dad5..453d0b3201560 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -795,13 +795,13 @@ public void shardFailed(ShardRouting failedShard, UnassignedInfo unassignedInfo) // mark restore entry for this shard as failed when it's due to a file corruption. There is no need wait on retries // to restore this shard on another node if the snapshot files are corrupt. In case where a node just left or crashed, // however, we only want to acknowledge the restore operation once it has been successfully restored on another node. - if (unassignedInfo.getFailure() != null && Lucene.isCorruptionException(unassignedInfo.getFailure().getCause())) { + if (unassignedInfo.failure() != null && Lucene.isCorruptionException(unassignedInfo.failure().getCause())) { changes(recoverySource).put( failedShard.shardId(), new ShardRestoreStatus( failedShard.currentNodeId(), RestoreInProgress.State.FAILURE, - unassignedInfo.getFailure().getCause().getMessage() + unassignedInfo.failure().getCause().getMessage() ) ); } @@ -829,7 +829,7 @@ public void shardInitialized(ShardRouting unassignedShard, ShardRouting initiali public void unassignedInfoUpdated(ShardRouting unassignedShard, UnassignedInfo newUnassignedInfo) { RecoverySource recoverySource = unassignedShard.recoverySource(); if (recoverySource.getType() == RecoverySource.Type.SNAPSHOT) { - if (newUnassignedInfo.getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) { + if (newUnassignedInfo.lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) { String reason = "shard could not be allocated to any of the nodes"; changes(recoverySource).put( unassignedShard.shardId(), diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index b4c15e7b77b78..7ca92ebfdcf32 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -395,7 +395,7 @@ public ClusterState execute(ClusterState currentState) { @Override public void onFailure(Exception e) { initializingClones.remove(snapshot); - logger.warn(() -> format("[%s][%s] failed to clone snapshot", repositoryName, snapshotName), e); + logSnapshotFailure("clone", snapshot, e); listener.onFailure(e); } @@ -3845,28 +3845,33 @@ private record CreateSnapshotTask( @Override public void onFailure(Exception e) { - final var logLevel = snapshotFailureLogLevel(e); - if (logLevel == Level.INFO && logger.isDebugEnabled() == false) { - // suppress stack trace at INFO unless extra verbosity is configured - logger.info( - format( - "[%s][%s] failed to create snapshot: %s", - snapshot.getRepository(), - snapshot.getSnapshotId().getName(), - e.getMessage() - ) - ); - } else { - logger.log( - logLevel, - () -> format("[%s][%s] failed to create snapshot", snapshot.getRepository(), snapshot.getSnapshotId().getName()), - e - 
); - } + logSnapshotFailure("create", snapshot, e); listener.onFailure(e); } } + private static void logSnapshotFailure(String operation, Snapshot snapshot, Exception e) { + final var logLevel = snapshotFailureLogLevel(e); + if (logLevel == Level.INFO && logger.isDebugEnabled() == false) { + // suppress stack trace at INFO unless extra verbosity is configured + logger.info( + format( + "[%s][%s] failed to %s snapshot: %s", + snapshot.getRepository(), + snapshot.getSnapshotId().getName(), + operation, + e.getMessage() + ) + ); + } else { + logger.log( + logLevel, + () -> format("[%s][%s] failed to %s snapshot", snapshot.getRepository(), snapshot.getSnapshotId().getName(), operation), + e + ); + } + } + private static Level snapshotFailureLogLevel(Exception e) { if (MasterService.isPublishFailureException(e)) { // no action needed, the new master will take things from here diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java index 419f2d0726880..7d8b966451d37 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java @@ -47,7 +47,9 @@ public class TaskCancellationService { public static final String BAN_PARENT_ACTION_NAME = "internal:admin/tasks/ban"; + public static final String REMOTE_CLUSTER_BAN_PARENT_ACTION_NAME = "cluster:internal/admin/tasks/ban"; public static final String CANCEL_CHILD_ACTION_NAME = "internal:admin/tasks/cancel_child"; + public static final String REMOTE_CLUSTER_CANCEL_CHILD_ACTION_NAME = "cluster:internal/admin/tasks/cancel_child"; public static final TransportVersion VERSION_SUPPORTING_CANCEL_CHILD_ACTION = TransportVersions.V_8_8_0; private static final Logger logger = LogManager.getLogger(TaskCancellationService.class); private final TransportService transportService; @@ -64,12 +66,24 @@ public TaskCancellationService(TransportService transportService) { BanParentTaskRequest::new, new BanParentRequestHandler() ); + transportService.registerRequestHandler( + REMOTE_CLUSTER_BAN_PARENT_ACTION_NAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + BanParentTaskRequest::new, + new BanParentRequestHandler() + ); transportService.registerRequestHandler( CANCEL_CHILD_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, CancelChildRequest::new, new CancelChildRequestHandler() ); + transportService.registerRequestHandler( + REMOTE_CLUSTER_CANCEL_CHILD_ACTION_NAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + CancelChildRequest::new, + new CancelChildRequestHandler() + ); } private String localNodeId() { @@ -425,7 +439,7 @@ public void cancelChildRemote(TaskId parentTask, long childRequestId, Transport. 
reason ); final CancelChildRequest request = CancelChildRequest.createCancelChildRequest(parentTask, childRequestId, reason); - transportService.sendRequest(childNode, CANCEL_CHILD_ACTION_NAME, request, TransportRequestOptions.EMPTY, NOOP_HANDLER); + transportService.sendRequest(childConnection, CANCEL_CHILD_ACTION_NAME, request, TransportRequestOptions.EMPTY, NOOP_HANDLER); } } diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 526f327b91c19..ef0c641bed04f 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -120,3 +120,4 @@ 8.13.2,8595000 8.13.3,8595000 8.13.4,8595001 +8.14.0,8636001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 39f2a701726af..73f60f2e5ea7e 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -120,3 +120,4 @@ 8.13.2,8503000 8.13.3,8503000 8.13.4,8503000 +8.14.0,8505000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java index f9483bd23f216..eb1a64ef66bbd 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java @@ -103,9 +103,9 @@ public ShardAllocationDecision decideShardAllocation(ShardRouting shard, Routing """ ,"unassigned_info": {"reason": "%s", "at": "%s", "last_allocation_status": "%s"} """, - shard.unassignedInfo().getReason(), - UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(shard.unassignedInfo().getUnassignedTimeInMillis())), - AllocationDecision.fromAllocationStatus(shard.unassignedInfo().getLastAllocationStatus()) + shard.unassignedInfo().reason(), + UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(shard.unassignedInfo().unassignedTimeMillis())), + AllocationDecision.fromAllocationStatus(shard.unassignedInfo().lastAllocationStatus()) ) : "", cae.getCurrentNode().getId(), diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 882da84d22fe2..f943ff14002c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -61,6 +61,7 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.elasticsearch.test.transport.MockTransportService.createTaskManager; /** * The test case for unit testing task manager and related transport actions @@ -176,12 +177,7 @@ public TestNode(String name, ThreadPool threadPool, Settings settings) { discoveryNode.set(DiscoveryNodeUtils.create(name, address.publishAddress(), emptyMap(), emptySet())); return discoveryNode.get(); }; - TaskManager taskManager; - if 
(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { - taskManager = new MockTaskManager(settings, threadPool, emptySet()); - } else { - taskManager = new TaskManager(settings, threadPool, emptySet()); - } + TaskManager taskManager = createTaskManager(settings, threadPool, emptySet(), Tracer.NOOP); transportService = new TransportService( settings, new Netty4Transport( diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java index a6d380bc7683c..19c268100d4a0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -162,7 +162,7 @@ public void testClusterStateUpdateTask() { private void assertStateAndFailedAllocations(IndexRoutingTable indexRoutingTable, ShardRoutingState state, int failedAllocations) { assertThat(indexRoutingTable.size(), equalTo(1)); assertThat(indexRoutingTable.shard(0).shard(0).state(), equalTo(state)); - assertThat(indexRoutingTable.shard(0).shard(0).unassignedInfo().getNumFailedAllocations(), equalTo(failedAllocations)); + assertThat(indexRoutingTable.shard(0).shard(0).unassignedInfo().failedAllocations(), equalTo(failedAllocations)); } private ClusterState createInitialClusterState(AllocationService service) { diff --git a/server/src/test/java/org/elasticsearch/action/search/RankFeaturePhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/RankFeaturePhaseTests.java new file mode 100644 index 0000000000000..836e88b3a12ef --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/search/RankFeaturePhaseTests.java @@ -0,0 +1,1178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.action.search; + +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.tests.store.MockDirectoryWrapper; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rank.RankShardResult; +import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; +import org.elasticsearch.search.rank.feature.RankFeatureShardResult; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +public class RankFeaturePhaseTests extends ESTestCase { + + private static final int DEFAULT_RANK_WINDOW_SIZE = 10; + private static final int DEFAULT_FROM = 0; + private static final int DEFAULT_SIZE = 10; + private static final String DEFAULT_FIELD = "some_field"; + + private final RankBuilder DEFAULT_RANK_BUILDER = rankBuilder( + DEFAULT_RANK_WINDOW_SIZE, + defaultQueryPhaseRankShardContext(new ArrayList<>(), DEFAULT_RANK_WINDOW_SIZE), + defaultQueryPhaseRankCoordinatorContext(DEFAULT_RANK_WINDOW_SIZE), + defaultRankFeaturePhaseRankShardContext(DEFAULT_FIELD), + defaultRankFeaturePhaseRankCoordinatorContext(DEFAULT_SIZE, DEFAULT_FROM, DEFAULT_RANK_WINDOW_SIZE) + ); + + private record ExpectedRankFeatureDoc(int doc, int rank, float score, String featureData) {} + + public void testRankFeaturePhaseWith1Shard() { + // request params used within SearchSourceBuilder and *RankContext classes + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // create a SearchSource to attach 
to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(DEFAULT_RANK_BUILDER); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults results = searchPhaseResults(controller, mockSearchPhaseContext)) { + final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); + QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); + try { + queryResult.setShardIndex(shard1Target.getShardId().getId()); + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 2 results, with doc ids 1 and 2 + int totalHits = randomIntBetween(2, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F), new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResult, totalHits, shard1Docs); + results.consumeResult(queryResult, () -> {}); + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + // make sure to match the context id generated above, otherwise we throw + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1, 2 })) { + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard1Target, + totalHits, + shard1Docs + ); + listener.onResponse(rankFeatureResult); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResult.decRef(); + } + + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + + mockSearchPhaseContext.assertNoFailure(); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertTrue(phaseDone.get()); + assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); + + SearchPhaseResults rankPhaseResults = rankFeaturePhase.rankPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(1, rankPhaseResults.getAtomicArray().length()); + assertEquals(1, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shard1Result = rankPhaseResults.getAtomicArray().get(0); + List expectedShardResults = List.of( + new ExpectedRankFeatureDoc(1, 1, 110.0F, "ranked_1"), + new ExpectedRankFeatureDoc(2, 2, 109.0F, "ranked_2") + ); + List expectedFinalResults = new ArrayList<>(expectedShardResults); + assertShardResults(shard1Result, expectedShardResults); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeaturePhaseWithMultipleShardsOneEmpty() { + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = 
new ScoreDoc[1][1]; + + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(DEFAULT_RANK_BUILDER); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 1), null); + SearchShardTarget shard3Target = new SearchShardTarget("node2", new ShardId("test", "na", 2), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(3); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults<SearchPhaseResult> results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 2 results, with doc ids 1 and 2 found on shards 0 and 1 respectively + final ShardSearchContextId ctxShard1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); + final ShardSearchContextId ctxShard2 = new ShardSearchContextId(UUIDs.base64UUID(), 456); + final ShardSearchContextId ctxShard3 = new ShardSearchContextId(UUIDs.base64UUID(), 789); + + QuerySearchResult queryResultShard1 = new QuerySearchResult(ctxShard1, shard1Target, null); + QuerySearchResult queryResultShard2 = new QuerySearchResult(ctxShard2, shard2Target, null); + QuerySearchResult queryResultShard3 = new QuerySearchResult(ctxShard3, shard3Target, null); + try { + queryResultShard1.setShardIndex(shard1Target.getShardId().getId()); + queryResultShard2.setShardIndex(shard2Target.getShardId().getId()); + queryResultShard3.setShardIndex(shard3Target.getShardId().getId()); + + final int shard1Results = randomIntBetween(1, 100); + final int shard2Results = randomIntBetween(1, 100); + final int shard3Results = 0; + + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F) }; + populateQuerySearchResult(queryResultShard1, shard1Results, shard1Docs); + final ScoreDoc[] shard2Docs = new ScoreDoc[] { new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResultShard2, shard2Results, shard2Docs); + final ScoreDoc[] shard3Docs = new ScoreDoc[0]; + populateQuerySearchResult(queryResultShard3, shard3Results, shard3Docs); + + results.consumeResult(queryResultShard2, () -> {}); + results.consumeResult(queryResultShard3, () -> {}); + results.consumeResult(queryResultShard1, () -> {}); + + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener<RankFeatureResult> listener + ) { + // make sure to match the context id generated above, otherwise we throw + // first shard + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1 })) { + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard1Target, + shard1Results, + shard1Docs + ); + listener.onResponse(rankFeatureResult); + } else if (request.contextId().getId() == 456 && Arrays.equals(request.getDocIds(), new int[] { 2 })) { + // second shard + buildRankFeatureResult( +
mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard2Target, + shard2Results, + shard2Docs + ); + listener.onResponse(rankFeatureResult); + } else if (request.contextId().getId() == 789) { + listener.onResponse(rankFeatureResult); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResultShard1.decRef(); + queryResultShard2.decRef(); + queryResultShard3.decRef(); + } + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertTrue(phaseDone.get()); + SearchPhaseResults rankPhaseResults = rankFeaturePhase.rankPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(3, rankPhaseResults.getAtomicArray().length()); + // one result is null + assertEquals(2, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shard1Result = rankPhaseResults.getAtomicArray().get(0); + List expectedShard1Results = List.of(new ExpectedRankFeatureDoc(1, 1, 110.0F, "ranked_1")); + assertShardResults(shard1Result, expectedShard1Results); + + SearchPhaseResult shard2Result = rankPhaseResults.getAtomicArray().get(1); + List expectedShard2Results = List.of(new ExpectedRankFeatureDoc(2, 1, 109.0F, "ranked_2")); + assertShardResults(shard2Result, expectedShard2Results); + + SearchPhaseResult shard3Result = rankPhaseResults.getAtomicArray().get(2); + assertNull(shard3Result); + + List expectedFinalResults = List.of( + new ExpectedRankFeatureDoc(1, 1, 110.0F, "ranked_1"), + new ExpectedRankFeatureDoc(2, 2, 109.0F, "ranked_2") + ); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeaturePhaseNoNeedForFetchingFieldData() { + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // build the appropriate RankBuilder; using a null rankFeaturePhaseRankShardContext + // and non-field based rankFeaturePhaseRankCoordinatorContext + RankBuilder rankBuilder = rankBuilder( + DEFAULT_RANK_WINDOW_SIZE, + defaultQueryPhaseRankShardContext(Collections.emptyList(), DEFAULT_RANK_WINDOW_SIZE), + negatingScoresQueryFeaturePhaseRankCoordinatorContext(DEFAULT_SIZE, DEFAULT_FROM, DEFAULT_RANK_WINDOW_SIZE), + null, + null + ); + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(rankBuilder); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 2 results, with doc ids 1 and 2 + final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); + QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); + + try { + 
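// the query result is ref-counted; the finally block below releases this test's reference once the result has been consumed +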
queryResult.setShardIndex(shard1Target.getShardId().getId()); + int totalHits = randomIntBetween(2, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F), new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResult, totalHits, shard1Docs); + results.consumeResult(queryResult, () -> {}); + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + // make sure to match the context id generated above, otherwise we throw + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1, 2 })) { + listener.onFailure(new UnsupportedOperationException("should not have reached here")); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResult.decRef(); + } + // override the RankFeaturePhase to skip moving to next phase + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertTrue(phaseDone.get()); + + // in this case there was no additional "RankFeature" results on shards, so we shortcut directly to queryPhaseResults + SearchPhaseResults rankPhaseResults = rankFeaturePhase.queryPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(1, rankPhaseResults.getAtomicArray().length()); + assertEquals(1, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shardResult = rankPhaseResults.getAtomicArray().get(0); + assertTrue(shardResult instanceof QuerySearchResult); + QuerySearchResult rankResult = (QuerySearchResult) shardResult; + assertNull(rankResult.rankFeatureResult()); + assertNotNull(rankResult.queryResult()); + + List expectedFinalResults = List.of( + new ExpectedRankFeatureDoc(2, 1, -9.0F, null), + new ExpectedRankFeatureDoc(1, 2, -10.0F, null) + ); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeaturePhaseOneShardFails() { + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(DEFAULT_RANK_BUILDER); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 1), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 2 results, with doc ids 1 and 2 found on shards 0 and 
1 respectively + final ShardSearchContextId ctxShard1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); + final ShardSearchContextId ctxShard2 = new ShardSearchContextId(UUIDs.base64UUID(), 456); + + QuerySearchResult queryResultShard1 = new QuerySearchResult(ctxShard1, shard1Target, null); + QuerySearchResult queryResultShard2 = new QuerySearchResult(ctxShard2, shard2Target, null); + try { + queryResultShard1.setShardIndex(shard1Target.getShardId().getId()); + queryResultShard2.setShardIndex(shard2Target.getShardId().getId()); + + final int shard1Results = randomIntBetween(1, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F) }; + populateQuerySearchResult(queryResultShard1, shard1Results, shard1Docs); + + final int shard2Results = randomIntBetween(1, 100); + final ScoreDoc[] shard2Docs = new ScoreDoc[] { new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResultShard2, shard2Results, shard2Docs); + + results.consumeResult(queryResultShard2, () -> {}); + results.consumeResult(queryResultShard1, () -> {}); + + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + // make sure to match the context id generated above, otherwise we throw + // first shard + if (request.contextId().getId() == 456 && Arrays.equals(request.getDocIds(), new int[] { 2 })) { + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard2Target, + shard2Results, + shard2Docs + ); + listener.onResponse(rankFeatureResult); + + } else if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1 })) { + // other shard; this one throws an exception + listener.onFailure(new IllegalArgumentException("simulated failure")); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResultShard1.decRef(); + queryResultShard2.decRef(); + } + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + + mockSearchPhaseContext.assertNoFailure(); + assertEquals(1, mockSearchPhaseContext.failures.size()); + assertTrue(mockSearchPhaseContext.failures.get(0).getCause().getMessage().contains("simulated failure")); + assertTrue(phaseDone.get()); + + SearchPhaseResults rankPhaseResults = rankFeaturePhase.rankPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(2, rankPhaseResults.getAtomicArray().length()); + // one shard failed + assertEquals(1, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shard1Result = rankPhaseResults.getAtomicArray().get(0); + assertNull(shard1Result); + + SearchPhaseResult shard2Result = rankPhaseResults.getAtomicArray().get(1); + List expectedShard2Results = List.of(new ExpectedRankFeatureDoc(2, 1, 109.0F, "ranked_2")); + List expectedFinalResults = new ArrayList<>(expectedShard2Results); + assertShardResults(shard2Result, expectedShard2Results); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + 
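// release the mock search response, if one was created, so ref-count accounting stays balanced +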
if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeaturePhaseExceptionThrownOnPhase() { + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(DEFAULT_RANK_BUILDER); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 2 results, with doc ids 1 and 2 + final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); + QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); + try { + queryResult.setShardIndex(shard1Target.getShardId().getId()); + int totalHits = randomIntBetween(2, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F), new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResult, totalHits, shard1Docs); + results.consumeResult(queryResult, () -> {}); + + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + // make sure to match the context id generated above, otherwise we throw + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1, 2 })) { + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard1Target, + totalHits, + shard1Docs + ); + listener.onResponse(rankFeatureResult); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResult.decRef(); + } + // override the RankFeaturePhase to raise an exception + RankFeaturePhase rankFeaturePhase = new RankFeaturePhase(results, null, mockSearchPhaseContext) { + @Override + void innerRun() { + throw new IllegalArgumentException("simulated failure"); + } + + @Override + public void moveToNextPhase( + SearchPhaseResults phaseResults, + SearchPhaseController.ReducedQueryPhase reducedQueryPhase + ) { + // this is called after the RankFeaturePhaseCoordinatorContext has been executed + phaseDone.set(true); + finalResults[0] = reducedQueryPhase.sortedTopDocs().scoreDocs(); + logger.debug("Skipping moving to next phase"); + } + }; + assertEquals("rank-feature", rankFeaturePhase.getName()); + try { + rankFeaturePhase.run(); + assertNotNull(mockSearchPhaseContext.phaseFailure.get()); + assertTrue(mockSearchPhaseContext.phaseFailure.get().getMessage().contains("simulated failure")); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertFalse(phaseDone.get()); + assertTrue(rankFeaturePhase.rankPhaseResults.getAtomicArray().asList().isEmpty()); + 
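// the phase failed before any reduction took place, so no final results were produced +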
assertNull(finalResults[0][0]); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeatureWithPagination() { + // request params used within SearchSourceBuilder and *RankContext classes + final int from = 1; + final int size = 1; + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // build the appropriate RankBuilder + RankBuilder rankBuilder = rankBuilder( + DEFAULT_RANK_WINDOW_SIZE, + defaultQueryPhaseRankShardContext(Collections.emptyList(), DEFAULT_RANK_WINDOW_SIZE), + defaultQueryPhaseRankCoordinatorContext(DEFAULT_RANK_WINDOW_SIZE), + defaultRankFeaturePhaseRankShardContext(DEFAULT_FIELD), + defaultRankFeaturePhaseRankCoordinatorContext(size, from, DEFAULT_RANK_WINDOW_SIZE) + ); + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(rankBuilder); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 1), null); + SearchShardTarget shard3Target = new SearchShardTarget("node2", new ShardId("test", "na", 2), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(3); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults<SearchPhaseResult> results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 4 results, with doc ids 1 and (11, 2, 200) found on shards 0 and 1 respectively + final ShardSearchContextId ctxShard1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); + final ShardSearchContextId ctxShard2 = new ShardSearchContextId(UUIDs.base64UUID(), 456); + final ShardSearchContextId ctxShard3 = new ShardSearchContextId(UUIDs.base64UUID(), 789); + + QuerySearchResult queryResultShard1 = new QuerySearchResult(ctxShard1, shard1Target, null); + QuerySearchResult queryResultShard2 = new QuerySearchResult(ctxShard2, shard2Target, null); + QuerySearchResult queryResultShard3 = new QuerySearchResult(ctxShard3, shard3Target, null); + + try { + queryResultShard1.setShardIndex(shard1Target.getShardId().getId()); + queryResultShard2.setShardIndex(shard2Target.getShardId().getId()); + queryResultShard3.setShardIndex(shard3Target.getShardId().getId()); + + final int shard1Results = randomIntBetween(1, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F) }; + populateQuerySearchResult(queryResultShard1, shard1Results, shard1Docs); + + final int shard2Results = randomIntBetween(1, 100); + final ScoreDoc[] shard2Docs = new ScoreDoc[] { + new ScoreDoc(11, 100.0F, -1), + new ScoreDoc(2, 9.0F), + new ScoreDoc(200, 1F, -1) }; + populateQuerySearchResult(queryResultShard2, shard2Results, shard2Docs); + + final int shard3Results = 0; + final ScoreDoc[] shard3Docs = new ScoreDoc[0]; + populateQuerySearchResult(queryResultShard3, shard3Results, shard3Docs); + + results.consumeResult(queryResultShard2, () -> {}); + results.consumeResult(queryResultShard3, () -> {}); + results.consumeResult(queryResultShard1, () -> {}); +
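+ // note: results are deliberately consumed out of shard order; the coordinator tracks each result by shardIndex, so consumption order must not matter + // do not make an actual http request, but rather generate the response + // as if we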
would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener listener + ) { + + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + // make sure to match the context id generated above, otherwise we throw + // first shard + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1 })) { + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard1Target, + shard1Results, + shard1Docs + ); + listener.onResponse(rankFeatureResult); + } else if (request.contextId().getId() == 456 && Arrays.equals(request.getDocIds(), new int[] { 11, 2, 200 })) { + // second shard + + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard2Target, + shard2Results, + shard2Docs + ); + listener.onResponse(rankFeatureResult); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + + } + }; + } finally { + queryResultShard1.decRef(); + queryResultShard2.decRef(); + queryResultShard3.decRef(); + } + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + + mockSearchPhaseContext.assertNoFailure(); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertTrue(phaseDone.get()); + SearchPhaseResults rankPhaseResults = rankFeaturePhase.rankPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(3, rankPhaseResults.getAtomicArray().length()); + // one result is null + assertEquals(2, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shard1Result = rankPhaseResults.getAtomicArray().get(0); + List expectedShard1Results = List.of(new ExpectedRankFeatureDoc(1, 1, 110.0F, "ranked_1")); + assertShardResults(shard1Result, expectedShard1Results); + + SearchPhaseResult shard2Result = rankPhaseResults.getAtomicArray().get(1); + List expectedShard2Results = List.of( + new ExpectedRankFeatureDoc(11, 1, 200.0F, "ranked_11"), + new ExpectedRankFeatureDoc(2, 2, 109.0F, "ranked_2"), + new ExpectedRankFeatureDoc(200, 3, 101.0F, "ranked_200") + + ); + assertShardResults(shard2Result, expectedShard2Results); + + SearchPhaseResult shard3Result = rankPhaseResults.getAtomicArray().get(2); + assertNull(shard3Result); + + List expectedFinalResults = List.of(new ExpectedRankFeatureDoc(1, 2, 110.0F, "ranked_1")); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + public void testRankFeatureCollectOnlyRankWindowSizeFeatures() { + // request params used within SearchSourceBuilder and *RankContext classes + final int rankWindowSize = 2; + AtomicBoolean phaseDone = new AtomicBoolean(false); + final ScoreDoc[][] finalResults = new ScoreDoc[1][1]; + + // build the appropriate RankBuilder + RankBuilder rankBuilder = rankBuilder( + rankWindowSize, + defaultQueryPhaseRankShardContext(Collections.emptyList(), rankWindowSize), + defaultQueryPhaseRankCoordinatorContext(rankWindowSize), + defaultRankFeaturePhaseRankShardContext(DEFAULT_FIELD), + 
defaultRankFeaturePhaseRankCoordinatorContext(DEFAULT_SIZE, DEFAULT_FROM, rankWindowSize) + ); + // create a SearchSource to attach to the request + SearchSourceBuilder searchSourceBuilder = searchSourceWithRankBuilder(rankBuilder); + + SearchPhaseController controller = searchPhaseController(); + SearchShardTarget shard1Target = new SearchShardTarget("node0", new ShardId("test", "na", 0), null); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 1), null); + SearchShardTarget shard3Target = new SearchShardTarget("node2", new ShardId("test", "na", 2), null); + + MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(3); + mockSearchPhaseContext.getRequest().source(searchSourceBuilder); + try (SearchPhaseResults<SearchPhaseResult> results = searchPhaseResults(controller, mockSearchPhaseContext)) { + // generate the QuerySearchResults that the RankFeaturePhase would have received from QueryPhase + // here we have 3 results, with doc ids 1, and (11, 2) found on shards 0 and 1 respectively + final ShardSearchContextId ctxShard1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); + final ShardSearchContextId ctxShard2 = new ShardSearchContextId(UUIDs.base64UUID(), 456); + final ShardSearchContextId ctxShard3 = new ShardSearchContextId(UUIDs.base64UUID(), 789); + + QuerySearchResult queryResultShard1 = new QuerySearchResult(ctxShard1, shard1Target, null); + QuerySearchResult queryResultShard2 = new QuerySearchResult(ctxShard2, shard2Target, null); + QuerySearchResult queryResultShard3 = new QuerySearchResult(ctxShard3, shard3Target, null); + + try { + queryResultShard1.setShardIndex(shard1Target.getShardId().getId()); + queryResultShard2.setShardIndex(shard2Target.getShardId().getId()); + queryResultShard3.setShardIndex(shard3Target.getShardId().getId()); + + final int shard1Results = randomIntBetween(1, 100); + final ScoreDoc[] shard1Docs = new ScoreDoc[] { new ScoreDoc(1, 10.0F) }; + populateQuerySearchResult(queryResultShard1, shard1Results, shard1Docs); + + final int shard2Results = randomIntBetween(1, 100); + final ScoreDoc[] shard2Docs = new ScoreDoc[] { new ScoreDoc(11, 100.0F), new ScoreDoc(2, 9.0F) }; + populateQuerySearchResult(queryResultShard2, shard2Results, shard2Docs); + + final int shard3Results = 0; + final ScoreDoc[] shard3Docs = new ScoreDoc[0]; + populateQuerySearchResult(queryResultShard3, shard3Results, shard3Docs); + + results.consumeResult(queryResultShard2, () -> {}); + results.consumeResult(queryResultShard3, () -> {}); + results.consumeResult(queryResultShard1, () -> {}); + + // do not make an actual http request, but rather generate the response + // as if we would have read it from the RankFeatureShardPhase + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + public void sendExecuteRankFeature( + Transport.Connection connection, + final RankFeatureShardRequest request, + SearchTask task, + final SearchActionListener<RankFeatureResult> listener + ) { + RankFeatureResult rankFeatureResult = new RankFeatureResult(); + // make sure to match the context id generated above, otherwise we throw + // first shard + if (request.contextId().getId() == 123 && Arrays.equals(request.getDocIds(), new int[] { 1 })) { + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard1Target, + shard1Results, + shard1Docs + ); + listener.onResponse(rankFeatureResult);
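+ // with rankWindowSize = 2, only the two highest-scoring docs overall (11 and 1) survive the query phase, so shard 2 is asked for doc 11 only + } else if (request.contextId().getId() == 456 && Arrays.equals(request.getDocIds(), new int[]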
{ 11 })) { + // second shard + buildRankFeatureResult( + mockSearchPhaseContext.getRequest().source().rankBuilder(), + rankFeatureResult, + shard2Target, + shard2Results, + new ScoreDoc[] { shard2Docs[0] } + ); + listener.onResponse(rankFeatureResult); + } else { + listener.onFailure(new MockDirectoryWrapper.FakeIOException()); + } + } + }; + } finally { + queryResultShard1.decRef(); + queryResultShard2.decRef(); + queryResultShard3.decRef(); + } + RankFeaturePhase rankFeaturePhase = rankFeaturePhase(results, mockSearchPhaseContext, finalResults, phaseDone); + try { + rankFeaturePhase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertTrue(mockSearchPhaseContext.failures.isEmpty()); + assertTrue(phaseDone.get()); + SearchPhaseResults rankPhaseResults = rankFeaturePhase.rankPhaseResults; + assertNotNull(rankPhaseResults.getAtomicArray()); + assertEquals(3, rankPhaseResults.getAtomicArray().length()); + // one result is null + assertEquals(2, rankPhaseResults.getSuccessfulResults().count()); + + SearchPhaseResult shard1Result = rankPhaseResults.getAtomicArray().get(0); + List expectedShardResults = List.of(new ExpectedRankFeatureDoc(1, 1, 110.0F, "ranked_1")); + assertShardResults(shard1Result, expectedShardResults); + + SearchPhaseResult shard2Result = rankPhaseResults.getAtomicArray().get(1); + List expectedShard2Results = List.of(new ExpectedRankFeatureDoc(11, 1, 200.0F, "ranked_11")); + assertShardResults(shard2Result, expectedShard2Results); + + SearchPhaseResult shard3Result = rankPhaseResults.getAtomicArray().get(2); + assertNull(shard3Result); + + List expectedFinalResults = List.of( + new ExpectedRankFeatureDoc(11, 1, 200.0F, "ranked_11"), + new ExpectedRankFeatureDoc(1, 2, 110.0F, "ranked_1") + ); + assertFinalResults(finalResults[0], expectedFinalResults); + } finally { + rankFeaturePhase.rankPhaseResults.close(); + } + } finally { + if (mockSearchPhaseContext.searchResponse.get() != null) { + mockSearchPhaseContext.searchResponse.get().decRef(); + } + } + } + + private RankFeaturePhaseRankCoordinatorContext defaultRankFeaturePhaseRankCoordinatorContext(int size, int from, int rankWindowSize) { + return new RankFeaturePhaseRankCoordinatorContext(size, from, rankWindowSize) { + + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { + // no-op + // this one is handled directly in rankGlobalResults to create a RankFeatureDoc + // and avoid modifying in-place the ScoreDoc's rank + } + + @Override + public void rankGlobalResults(List rankSearchResults, ActionListener rankListener) { + List features = new ArrayList<>(); + for (RankFeatureResult rankFeatureResult : rankSearchResults) { + RankFeatureShardResult shardResult = rankFeatureResult.shardResult(); + features.addAll(Arrays.stream(shardResult.rankFeatureDocs).toList()); + } + rankListener.onResponse(features.toArray(new RankFeatureDoc[0])); + } + + @Override + public RankFeatureDoc[] rankAndPaginate(RankFeatureDoc[] rankFeatureDocs) { + Arrays.sort(rankFeatureDocs, Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()); + RankFeatureDoc[] topResults = new RankFeatureDoc[Math.max(0, Math.min(size, rankFeatureDocs.length - from))]; + // perform pagination + for (int rank = 0; rank < topResults.length; ++rank) { + RankFeatureDoc rfd = rankFeatureDocs[from + rank]; + topResults[rank] = new RankFeatureDoc(rfd.doc, rfd.score, rfd.shardIndex); + topResults[rank].rank = from + rank + 1; + } + return topResults; + } + }; + } + + private QueryPhaseRankCoordinatorContext 
negatingScoresQueryFeaturePhaseRankCoordinatorContext(int size, int from, int rankWindowSize) { + return new QueryPhaseRankCoordinatorContext(rankWindowSize) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List rankSearchResults, + SearchPhaseController.TopDocsStats topDocsStats + ) { + List docScores = new ArrayList<>(); + for (QuerySearchResult phaseResults : rankSearchResults) { + docScores.addAll(Arrays.asList(phaseResults.topDocs().topDocs.scoreDocs)); + } + ScoreDoc[] sortedDocs = docScores.toArray(new ScoreDoc[0]); + // negating scores + Arrays.stream(sortedDocs).forEach(doc -> doc.score *= -1); + + Arrays.sort(sortedDocs, Comparator.comparing((ScoreDoc doc) -> doc.score).reversed()); + sortedDocs = Arrays.stream(sortedDocs).limit(rankWindowSize).toArray(ScoreDoc[]::new); + RankFeatureDoc[] topResults = new RankFeatureDoc[Math.max(0, Math.min(size, sortedDocs.length - from))]; + // perform pagination + for (int rank = 0; rank < topResults.length; ++rank) { + ScoreDoc base = sortedDocs[from + rank]; + topResults[rank] = new RankFeatureDoc(base.doc, base.score, base.shardIndex); + topResults[rank].rank = from + rank + 1; + } + topDocsStats.fetchHits = topResults.length; + return topResults; + } + }; + } + + private RankFeaturePhaseRankShardContext defaultRankFeaturePhaseRankShardContext(String field) { + return new RankFeaturePhaseRankShardContext(field) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + SearchHit hit = hits.getHits()[i]; + rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId); + rankFeatureDocs[i].score += 100f; + rankFeatureDocs[i].featureData("ranked_" + hit.docId()); + rankFeatureDocs[i].rank = i + 1; + } + return new RankFeatureShardResult(rankFeatureDocs); + } + }; + } + + private QueryPhaseRankCoordinatorContext defaultQueryPhaseRankCoordinatorContext(int rankWindowSize) { + return new QueryPhaseRankCoordinatorContext(rankWindowSize) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + List rankDocs = new ArrayList<>(); + for (int i = 0; i < querySearchResults.size(); i++) { + QuerySearchResult querySearchResult = querySearchResults.get(i); + RankFeatureShardResult shardResult = (RankFeatureShardResult) querySearchResult.getRankShardResult(); + for (RankFeatureDoc frd : shardResult.rankFeatureDocs) { + frd.shardIndex = i; + rankDocs.add(frd); + } + } + rankDocs.sort(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()); + RankFeatureDoc[] topResults = rankDocs.stream().limit(rankWindowSize).toArray(RankFeatureDoc[]::new); + topDocStats.fetchHits = topResults.length; + return topResults; + } + }; + } + + private QueryPhaseRankShardContext defaultQueryPhaseRankShardContext(List queries, int rankWindowSize) { + return new QueryPhaseRankShardContext(queries, rankWindowSize) { + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + throw new UnsupportedOperationException( + "shard-level QueryPhase context should not be accessed as part of the RankFeature phase" + ); + } + }; + } + + private SearchPhaseController searchPhaseController() { + return new SearchPhaseController((task, request) -> InternalAggregationTestCase.emptyReduceContextBuilder()); + } + + private RankBuilder rankBuilder( + int rankWindowSize, + 
QueryPhaseRankShardContext queryPhaseRankShardContext, + QueryPhaseRankCoordinatorContext queryPhaseRankCoordinatorContext, + RankFeaturePhaseRankShardContext rankFeaturePhaseRankShardContext, + RankFeaturePhaseRankCoordinatorContext rankFeaturePhaseRankCoordinatorContext + ) { + return new RankBuilder(rankWindowSize) { + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + // no-op + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + // no-op + } + + @Override + public boolean isCompoundBuilder() { + return true; + } + + @Override + public Explanation explainHit(Explanation baseExplanation, RankDoc scoreDoc, List queryNames) { + // no-op + return baseExplanation; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return queryPhaseRankShardContext; + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return queryPhaseRankCoordinatorContext; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return rankFeaturePhaseRankShardContext; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return rankFeaturePhaseRankCoordinatorContext; + } + + @Override + protected boolean doEquals(RankBuilder other) { + return other != null && other.rankWindowSize() == rankWindowSize; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + public String getWriteableName() { + return "test-rank-builder"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_12_0; + } + }; + } + + private SearchSourceBuilder searchSourceWithRankBuilder(RankBuilder rankBuilder) { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.rankBuilder(rankBuilder); + return searchSourceBuilder; + } + + private SearchPhaseResults searchPhaseResults( + SearchPhaseController controller, + MockSearchPhaseContext mockSearchPhaseContext + ) { + return controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + mockSearchPhaseContext.numShards, + exc -> {} + ); + } + + private void buildRankFeatureResult( + RankBuilder shardRankBuilder, + RankFeatureResult rankFeatureResult, + SearchShardTarget shardTarget, + int totalHits, + ScoreDoc[] scoreDocs + ) { + rankFeatureResult.setSearchShardTarget(shardTarget); + // these are the SearchHits generated by the FetchFieldPhase processor + SearchHit[] searchHits = new SearchHit[scoreDocs.length]; + float maxScore = Float.MIN_VALUE; + for (int i = 0; i < searchHits.length; i++) { + searchHits[i] = SearchHit.unpooled(scoreDocs[i].doc); + searchHits[i].shard(shardTarget); + searchHits[i].score(scoreDocs[i].score); + searchHits[i].setDocumentField(DEFAULT_FIELD, new DocumentField(DEFAULT_FIELD, Collections.singletonList(scoreDocs[i].doc))); + if (scoreDocs[i].score > maxScore) { + maxScore = scoreDocs[i].score; + } + } + SearchHits hits = null; + try { + hits = SearchHits.unpooled(searchHits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); + // construct the appropriate RankFeatureDoc objects based on the rank builder + RankFeaturePhaseRankShardContext rankFeaturePhaseRankShardContext = 
+    private void buildRankFeatureResult(
+        RankBuilder shardRankBuilder,
+        RankFeatureResult rankFeatureResult,
+        SearchShardTarget shardTarget,
+        int totalHits,
+        ScoreDoc[] scoreDocs
+    ) {
+        rankFeatureResult.setSearchShardTarget(shardTarget);
+        // these are the SearchHits generated by the FetchFieldPhase processor
+        SearchHit[] searchHits = new SearchHit[scoreDocs.length];
+        float maxScore = Float.MIN_VALUE;
+        for (int i = 0; i < searchHits.length; i++) {
+            searchHits[i] = SearchHit.unpooled(scoreDocs[i].doc);
+            searchHits[i].shard(shardTarget);
+            searchHits[i].score(scoreDocs[i].score);
+            searchHits[i].setDocumentField(DEFAULT_FIELD, new DocumentField(DEFAULT_FIELD, Collections.singletonList(scoreDocs[i].doc)));
+            if (scoreDocs[i].score > maxScore) {
+                maxScore = scoreDocs[i].score;
+            }
+        }
+        SearchHits hits = null;
+        try {
+            hits = SearchHits.unpooled(searchHits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore);
+            // construct the appropriate RankFeatureDoc objects based on the rank builder
+            RankFeaturePhaseRankShardContext rankFeaturePhaseRankShardContext = shardRankBuilder.buildRankFeaturePhaseShardContext();
+            RankFeatureShardResult rankShardResult = (RankFeatureShardResult) rankFeaturePhaseRankShardContext.buildRankFeatureShardResult(
+                hits,
+                shardTarget.getShardId().id()
+            );
+            rankFeatureResult.shardResult(rankShardResult);
+        } finally {
+            if (hits != null) {
+                hits.decRef();
+            }
+        }
+    }
+
+    private void populateQuerySearchResult(QuerySearchResult queryResult, int totalHits, ScoreDoc[] scoreDocs) {
+        // this would have been populated during the QueryPhase by the appropriate QueryPhaseShardContext
+        float maxScore = Float.MIN_VALUE;
+        RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[scoreDocs.length];
+        for (int i = 0; i < scoreDocs.length; i++) {
+            if (scoreDocs[i].score > maxScore) {
+                maxScore = scoreDocs[i].score;
+            }
+            rankFeatureDocs[i] = new RankFeatureDoc(scoreDocs[i].doc, scoreDocs[i].score, scoreDocs[i].shardIndex);
+        }
+        queryResult.setRankShardResult(new RankFeatureShardResult(rankFeatureDocs));
+        queryResult.topDocs(
+            new TopDocsAndMaxScore(
+                new TopDocs(new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), scoreDocs),
+                maxScore
+            ),
+            new DocValueFormat[0]
+        );
+        queryResult.size(totalHits);
+    }
+
+    private RankFeaturePhase rankFeaturePhase(
+        SearchPhaseResults<SearchPhaseResult> results,
+        MockSearchPhaseContext mockSearchPhaseContext,
+        ScoreDoc[][] finalResults,
+        AtomicBoolean phaseDone
+    ) {
+        // override the RankFeaturePhase to skip moving to next phase
+        return new RankFeaturePhase(results, null, mockSearchPhaseContext) {
+            @Override
+            public void moveToNextPhase(
+                SearchPhaseResults<SearchPhaseResult> phaseResults,
+                SearchPhaseController.ReducedQueryPhase reducedQueryPhase
+            ) {
+                // this is called after the RankFeaturePhaseCoordinatorContext has been executed
+                phaseDone.set(true);
+                finalResults[0] = reducedQueryPhase.sortedTopDocs().scoreDocs();
+                logger.debug("Skipping moving to next phase");
+            }
+        };
+    }
+
+    private void assertRankFeatureResults(RankFeatureShardResult rankFeatureShardResult, List<ExpectedRankFeatureDoc> expectedResults) {
+        assertEquals(expectedResults.size(), rankFeatureShardResult.rankFeatureDocs.length);
+        for (int i = 0; i < expectedResults.size(); i++) {
+            ExpectedRankFeatureDoc expected = expectedResults.get(i);
+            RankFeatureDoc actual = rankFeatureShardResult.rankFeatureDocs[i];
+            assertEquals(expected.doc, actual.doc);
+            assertEquals(expected.rank, actual.rank);
+            assertEquals(expected.score, actual.score, 10E-5);
+            assertEquals(expected.featureData, actual.featureData);
+        }
+    }
+
+    private void assertFinalResults(ScoreDoc[] finalResults, List<ExpectedRankFeatureDoc> expectedResults) {
+        assertEquals(expectedResults.size(), finalResults.length);
+        for (int i = 0; i < expectedResults.size(); i++) {
+            ExpectedRankFeatureDoc expected = expectedResults.get(i);
+            RankFeatureDoc actual = (RankFeatureDoc) finalResults[i];
+            assertEquals(expected.doc, actual.doc);
+            assertEquals(expected.rank, actual.rank);
+            assertEquals(expected.score, actual.score, 10E-5);
+        }
+    }
+
+    private void assertShardResults(SearchPhaseResult shardResult, List<ExpectedRankFeatureDoc> expectedShardResults) {
+        assertTrue(shardResult instanceof RankFeatureResult);
+        RankFeatureResult rankResult = (RankFeatureResult) shardResult;
+        assertNotNull(rankResult.rankFeatureResult());
+        assertNull(rankResult.queryResult());
+        assertNotNull(rankResult.rankFeatureResult().shardResult());
+        RankFeatureShardResult rankFeatureShardResult = rankResult.rankFeatureResult().shardResult();
+        assertRankFeatureResults(rankFeatureShardResult, expectedShardResults);
+    }
+}
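Aside: the "negating scores" coordinator context above hinges on a small negate-sort-paginate idiom that can be puzzling on first read. Below is a minimal standalone sketch of just that idiom over plain Lucene ScoreDocs; the class and method names are invented for illustration and are not part of this change.

import java.util.Arrays;
import java.util.Comparator;

import org.apache.lucene.search.ScoreDoc;

class NegateSortPaginateSketch {
    // negate every score, sort descending, cap at the rank window, then slice out the page [from, from + size)
    static ScoreDoc[] rank(ScoreDoc[] docs, int from, int size, int rankWindowSize) {
        for (ScoreDoc doc : docs) {
            doc.score *= -1; // a descending sort of negated scores equals ascending order of the original scores
        }
        ScoreDoc[] sorted = docs.clone();
        Arrays.sort(sorted, Comparator.comparing((ScoreDoc d) -> d.score).reversed());
        sorted = Arrays.stream(sorted).limit(rankWindowSize).toArray(ScoreDoc[]::new);
        int pageLength = Math.max(0, Math.min(size, sorted.length - from));
        return pageLength == 0 ? new ScoreDoc[0] : Arrays.copyOfRange(sorted, from, from + pageLength);
    }
}

diff --git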
a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index d8c7d3e134571..475f44238f36e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -517,30 +517,6 @@ public void testValidate() throws IOException { assertEquals(1, validationErrors.validationErrors().size()); assertEquals("[rank] cannot be used with [point in time]", validationErrors.validationErrors().get(0)); } - { - SearchRequest searchRequest = new SearchRequest().source( - new SearchSourceBuilder().rankBuilder(new TestRankBuilder(100)) - .query(QueryBuilders.termQuery("field", "term")) - .knnSearch(List.of(new KnnSearchBuilder("vector", new float[] { 0f }, 10, 100, null))) - .profile(true) - ); - ActionRequestValidationException validationErrors = searchRequest.validate(); - assertNotNull(validationErrors); - assertEquals(1, validationErrors.validationErrors().size()); - assertEquals("[rank] requires [profile] is [false]", validationErrors.validationErrors().get(0)); - } - { - SearchRequest searchRequest = new SearchRequest().source( - new SearchSourceBuilder().rankBuilder(new TestRankBuilder(100)) - .query(QueryBuilders.termQuery("field", "term")) - .knnSearch(List.of(new KnnSearchBuilder("vector", new float[] { 0f }, 10, 100, null))) - .explain(true) - ); - ActionRequestValidationException validationErrors = searchRequest.validate(); - assertNotNull(validationErrors); - assertEquals(1, validationErrors.validationErrors().size()); - assertEquals("[rank] requires [explain] is [false]", validationErrors.validationErrors().get(0)); - } { SearchRequest searchRequest = new SearchRequest("test").source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(BytesArray.EMPTY)) diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 05e345bf4b52b..96ff00488a1d2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -559,10 +559,10 @@ private boolean primaryInactiveDueToRecovery(final String indexName, final Clust && primaryShard.recoverySource().getType() == RecoverySource.Type.EXISTING_STORE) { return false; } - if (primaryShard.unassignedInfo().getNumFailedAllocations() > 0) { + if (primaryShard.unassignedInfo().failedAllocations() > 0) { return false; } - if (primaryShard.unassignedInfo().getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) { + if (primaryShard.unassignedInfo().lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO) { return false; } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 1c1f6b314fa70..0277855db9c4c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -731,6 +731,15 @@ public void testReplaceBackingIndexThrowsExceptionIfIndexNotPartOfDataStream() { expectThrows(IllegalArgumentException.class, () -> original.replaceBackingIndex(standaloneIndex, newBackingIndex)); } + public void 
testReplaceBackingIndexThrowsExceptionIfIndexPartOfFailureStore() {
+        DataStream original = createRandomDataStream();
+        int indexToReplace = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1) - 1;
+
+        Index failureIndex = original.getFailureIndices().getIndices().get(indexToReplace);
+        Index newBackingIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
+        expectThrows(IllegalArgumentException.class, () -> original.replaceBackingIndex(failureIndex, newBackingIndex));
+    }
+
     public void testReplaceBackingIndexThrowsExceptionIfReplacingWriteIndex() {
         int numBackingIndices = randomIntBetween(2, 32);
         int writeIndexPosition = numBackingIndices - 1;
@@ -761,6 +770,78 @@ public void testReplaceBackingIndexThrowsExceptionIfReplacingWriteIndex() {
         );
     }
 
+    public void testReplaceFailureIndex() {
+        DataStream original = createRandomDataStream();
+        int indexToReplace = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1) - 1;
+
+        Index newFailureIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
+        DataStream updated = original.replaceFailureStoreIndex(
+            original.getFailureIndices().getIndices().get(indexToReplace),
+            newFailureIndex
+        );
+        assertThat(updated.getName(), equalTo(original.getName()));
+        assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1));
+        assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size()));
+        assertThat(updated.getFailureIndices().getIndices().get(indexToReplace), equalTo(newFailureIndex));
+
+        for (int i = 0; i < original.getFailureIndices().getIndices().size(); i++) {
+            if (i != indexToReplace) {
+                assertThat(updated.getFailureIndices().getIndices().get(i), equalTo(original.getFailureIndices().getIndices().get(i)));
+            }
+        }
+    }
+
+    public void testReplaceFailureIndexThrowsExceptionIfIndexNotPartOfDataStream() {
+        DataStream original = createRandomDataStream();
+
+        Index standaloneIndex = new Index("index-foo", UUIDs.randomBase64UUID(random()));
+        Index newFailureIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
+        expectThrows(IllegalArgumentException.class, () -> original.replaceFailureStoreIndex(standaloneIndex, newFailureIndex));
+    }
+
+    public void testReplaceFailureIndexThrowsExceptionIfIndexPartOfBackingIndices() {
+        DataStream original = createRandomDataStream();
+        int indexToReplace = randomIntBetween(1, original.getIndices().size() - 1) - 1;
+
+        Index backingIndex = original.getIndices().get(indexToReplace);
+        Index newFailureIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
+        expectThrows(IllegalArgumentException.class, () -> original.replaceFailureStoreIndex(backingIndex, newFailureIndex));
+    }
+
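+    // the failure store write index is the last failure index, so replacing it must be rejected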
+    public void testReplaceFailureIndexThrowsExceptionIfReplacingWriteIndex() {
+        int numFailureIndices = randomIntBetween(2, 32);
+        int writeIndexPosition = numFailureIndices - 1;
+        String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
+        long ts = System.currentTimeMillis();
+
+        List<Index> indices = new ArrayList<>(1);
+        indices.add(new Index(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts), UUIDs.randomBase64UUID(random())));
+
+        List<Index> failureIndices = new ArrayList<>(numFailureIndices);
+        for (int i = 1; i <= numFailureIndices; i++) {
+            failureIndices.add(new Index(DataStream.getDefaultFailureStoreName(dataStreamName, i, ts), UUIDs.randomBase64UUID(random())));
+        }
+        int generation = randomBoolean() ? numFailureIndices : numFailureIndices + randomIntBetween(1, 5);
+        DataStream original = newInstance(dataStreamName, indices, generation, null, false, null, failureIndices);
+
+        Index newBackingIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random()));
+        IllegalArgumentException e = expectThrows(
+            IllegalArgumentException.class,
+            () -> original.replaceFailureStoreIndex(failureIndices.get(writeIndexPosition), newBackingIndex)
+        );
+        assertThat(
+            e.getMessage(),
+            equalTo(
+                String.format(
+                    Locale.ROOT,
+                    "cannot replace failure index [%s] of data stream [%s] because it is the failure store write index",
+                    failureIndices.get(writeIndexPosition).getName(),
+                    dataStreamName
+                )
+            )
+        );
+    }
+
     public void testSnapshot() {
         var preSnapshotDataStream = DataStreamTestHelper.randomInstance();
         var indicesToRemove = randomSubsetOf(preSnapshotDataStream.getIndices());
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java
index b7bd54eef2c70..344acb7a8ff40 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java
@@ -274,6 +274,76 @@ public void testDeleteCurrentWriteIndexForDataStream() {
         );
     }
 
+    public void testDeleteMultipleFailureIndexForDataStream() {
+        int numBackingIndices = randomIntBetween(3, 5);
+        int numBackingIndicesToDelete = randomIntBetween(2, numBackingIndices - 1);
+        String dataStreamName = randomAlphaOfLength(6).toLowerCase(Locale.ROOT);
+        long ts = System.currentTimeMillis();
+        ClusterState before = DataStreamTestHelper.getClusterStateWithDataStreams(
+            List.of(new Tuple<>(dataStreamName, numBackingIndices)),
+            List.of(),
+            ts,
+            Settings.EMPTY,
+            1,
+            false,
+            true
+        );
+
+        List<Integer> indexNumbersToDelete = randomSubsetOf(
+            numBackingIndicesToDelete,
+            IntStream.rangeClosed(1, numBackingIndices - 1).boxed().toList()
+        );
+
+        Set<Index> indicesToDelete = new HashSet<>();
+        for (int k : indexNumbersToDelete) {
+            indicesToDelete.add(before.metadata().index(DataStream.getDefaultFailureStoreName(dataStreamName, k, ts)).getIndex());
+        }
+        ClusterState after = MetadataDeleteIndexService.deleteIndices(before, indicesToDelete, Settings.EMPTY);
+
+        DataStream dataStream = after.metadata().dataStreams().get(dataStreamName);
+        assertThat(dataStream, notNullValue());
+        assertThat(dataStream.getFailureIndices().getIndices().size(), equalTo(numBackingIndices - indexNumbersToDelete.size()));
+        for (Index i : indicesToDelete) {
+            assertThat(after.metadata().getIndices().get(i.getName()), nullValue());
+            assertFalse(dataStream.getFailureIndices().getIndices().contains(i));
+        }
+        assertThat(after.metadata().getIndices().size(), equalTo((2 * numBackingIndices) - indexNumbersToDelete.size()));
+    }
+
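+    // deleting the current failure store write index must be rejected outright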
+    public void testDeleteCurrentWriteFailureIndexForDataStream() {
+        int numBackingIndices = randomIntBetween(1, 5);
+        String dataStreamName = randomAlphaOfLength(6).toLowerCase(Locale.ROOT);
+        long ts = System.currentTimeMillis();
+        ClusterState before = DataStreamTestHelper.getClusterStateWithDataStreams(
+            List.of(new Tuple<>(dataStreamName, numBackingIndices)),
+            List.of(),
+            ts,
+            Settings.EMPTY,
+            1,
+            false,
+            true
+        );
+
+        Index indexToDelete = before.metadata()
+            .index(DataStream.getDefaultFailureStoreName(dataStreamName, numBackingIndices, ts))
+            .getIndex();
+        Exception e = expectThrows(
+            IllegalArgumentException.class,
+            () -> MetadataDeleteIndexService.deleteIndices(before, Set.of(indexToDelete), Settings.EMPTY)
+        );
+
+        assertThat(
+            e.getMessage(),
+            containsString(
+                "index ["
+                    + indexToDelete.getName()
+                    + "] is the failure store write index for data stream ["
+                    + dataStreamName
+                    + "] and cannot be deleted"
+            )
+        );
+    }
+
     private ClusterState clusterState(String index) {
         IndexMetadata indexMetadata = IndexMetadata.builder(index)
             .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion(random())))
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java
index 1cb5650d26930..a093178c04814 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java
@@ -20,6 +20,9 @@
 import org.elasticsearch.core.CheckedRunnable;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.TimeUnit;
@@ -98,8 +101,11 @@ public void testBatchCloseIndices() throws Exception {
         createIndex("test-3", indicesAdmin().prepareCreate("test-3"));
         ensureGreen("test-1", "test-2", "test-3");
 
-        final var assertingListener = closedIndexCountListener(3);
-        clusterService.addListener(assertingListener);
+        final List<String[]> observedClosedIndices = Collections.synchronizedList(new ArrayList<>());
+        final ClusterStateListener closedIndicesStateListener = event -> observedClosedIndices.add(
+            event.state().metadata().getConcreteAllClosedIndices()
+        );
+        clusterService.addListener(closedIndicesStateListener);
 
         final var block1 = blockMasterService(masterService);
         block1.run(); // wait for block
@@ -123,12 +129,12 @@ public void testBatchCloseIndices() throws Exception {
         block2.run(); // release block
 
         // assert that the requests were acknowledged
-        final var resp1 = future1.get();
+        final var resp1 = safeGet(future1);
         assertAcked(resp1);
         assertThat(resp1.getIndices(), hasSize(1));
         assertThat(resp1.getIndices().get(0).getIndex().getName(), is("test-1"));
 
-        final var resp2 = future2.get();
+        final var resp2 = safeGet(future2);
         assertAcked(resp2);
         assertThat(resp2.getIndices(), hasSize(2));
         assertThat(resp2.getIndices().stream().map(r -> r.getIndex().getName()).toList(), containsInAnyOrder("test-2", "test-3"));
@@ -139,7 +145,10 @@ public void testBatchCloseIndices() throws Exception {
             assertThat(indexMetadata.getState(), is(State.CLOSE));
         }
 
-        clusterService.removeListener(assertingListener);
+        clusterService.removeListener(closedIndicesStateListener);
+        observedClosedIndices.forEach(
+            indices -> assertThat("unexpected closed indices: " + Arrays.toString(indices), indices.length, oneOf(0, 3))
+        );
     }
 
     public void testBatchBlockIndices() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java
index 6e24735eba454..e034971482bcf 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceTests.java
@@ -457,7
+457,7 @@ private static void assertIsClosed(final String indexName, final ClusterState cl assertThat( RoutingNodesHelper.asStream(shardRoutingTable) .map(ShardRouting::unassignedInfo) - .map(UnassignedInfo::getReason) + .map(UnassignedInfo::reason) .allMatch(info -> info == UnassignedInfo.Reason.INDEX_CLOSED), is(true) ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationServiceTests.java index aacf9f803dde0..171fd397d65f3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationServiceTests.java @@ -109,7 +109,7 @@ public void testNoDelayedUnassigned() { assertThat(unassignedShards.size(), equalTo(0)); } else { assertThat(unassignedShards.size(), equalTo(1)); - assertThat(unassignedShards.get(0).unassignedInfo().isDelayed(), equalTo(false)); + assertThat(unassignedShards.get(0).unassignedInfo().delayed(), equalTo(false)); } delayedAllocationService.clusterChanged(new ClusterChangedEvent("test", newState, prevState)); @@ -169,7 +169,7 @@ public void testDelayedUnassignedScheduleReroute() throws Exception { // make sure the replica is marked as delayed (i.e. not reallocated) assertEquals(1, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShard)); ShardRouting delayedShard = stateWithDelayedShard.getRoutingNodes().unassigned().iterator().next(); - assertEquals(baseTimestampNanos, delayedShard.unassignedInfo().getUnassignedTimeInNanos()); + assertEquals(baseTimestampNanos, delayedShard.unassignedInfo().unassignedTimeNanos()); // mock ClusterService.submitStateUpdateTask() method CountDownLatch latch = new CountDownLatch(1); @@ -318,8 +318,8 @@ public void testDelayedUnassignedScheduleRerouteAfterDelayedReroute() throws Exc final ClusterState stateWithDelayedShards = clusterState; assertEquals(2, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShards)); RoutingNodes.UnassignedShards.UnassignedIterator iter = stateWithDelayedShards.getRoutingNodes().unassigned().iterator(); - assertEquals(baseTimestampNanos, iter.next().unassignedInfo().getUnassignedTimeInNanos()); - assertEquals(baseTimestampNanos, iter.next().unassignedInfo().getUnassignedTimeInNanos()); + assertEquals(baseTimestampNanos, iter.next().unassignedInfo().unassignedTimeNanos()); + assertEquals(baseTimestampNanos, iter.next().unassignedInfo().unassignedTimeNanos()); // mock ClusterService.submitStateUpdateTask() method CountDownLatch latch1 = new CountDownLatch(1); @@ -491,7 +491,7 @@ public void testDelayedUnassignedScheduleRerouteRescheduledOnShorterDelay() { // make sure the replica is marked as delayed (i.e. 
not reallocated) assertEquals(1, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShard)); ShardRouting delayedShard = stateWithDelayedShard.getRoutingNodes().unassigned().iterator().next(); - assertEquals(nodeLeftTimestampNanos, delayedShard.unassignedInfo().getUnassignedTimeInNanos()); + assertEquals(nodeLeftTimestampNanos, delayedShard.unassignedInfo().unassignedTimeNanos()); assertNull(delayedAllocationService.delayedRerouteTask.get()); long delayUntilClusterChangeEvent = TimeValue.timeValueNanos(randomInt((int) shorterDelaySetting.nanos() - 1)).nanos(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index e6466b9237d3a..33695883aebc3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -401,7 +401,7 @@ public void testEqualsIgnoringVersion() { .withUnassignedInfo( otherRouting.unassignedInfo() == null ? new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test") - : new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().getMessage() + "_1") + : new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().message() + "_1") ) .build(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 1d0b01a10da78..eb39d56346eb2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -137,14 +137,14 @@ public void testSerialization() throws Exception { meta.writeTo(out); out.close(); - UnassignedInfo read = new UnassignedInfo(out.bytes().streamInput()); - assertThat(read.getReason(), equalTo(meta.getReason())); - assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); - assertThat(read.getMessage(), equalTo(meta.getMessage())); - assertThat(read.getDetails(), equalTo(meta.getDetails())); - assertThat(read.getNumFailedAllocations(), equalTo(meta.getNumFailedAllocations())); - assertThat(read.getFailedNodeIds(), equalTo(meta.getFailedNodeIds())); - assertThat(read.getLastAllocatedNodeId(), equalTo(meta.getLastAllocatedNodeId())); + UnassignedInfo read = UnassignedInfo.fromStreamInput(out.bytes().streamInput()); + assertThat(read.reason(), equalTo(meta.reason())); + assertThat(read.unassignedTimeMillis(), equalTo(meta.unassignedTimeMillis())); + assertThat(read.message(), equalTo(meta.message())); + assertThat(read.details(), equalTo(meta.details())); + assertThat(read.failedAllocations(), equalTo(meta.failedAllocations())); + assertThat(read.failedNodeIds(), equalTo(meta.failedNodeIds())); + assertThat(read.lastAllocatedNodeId(), equalTo(meta.lastAllocatedNodeId())); } public void testIndexCreated() { @@ -161,7 +161,7 @@ public void testIndexCreated() { .routingTable(RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(metadata.index("test")).build()) .build(); for (ShardRouting shard : shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED)) { - assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.INDEX_CREATED)); + assertThat(shard.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.INDEX_CREATED)); } } @@ -181,7 +181,7 @@ public void 
testClusterRecovered() { ) .build(); for (ShardRouting shard : shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED)) { - assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.CLUSTER_RECOVERED)); + assertThat(shard.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.CLUSTER_RECOVERED)); } } @@ -296,8 +296,8 @@ private void assertLastAllocatedNodeIdsAssigned( for (int shardCopy = 0; shardCopy < shardRoutingTable.size(); shardCopy++) { final var shard = shardRoutingTable.shard(shardCopy); assertTrue(shard.unassigned()); - assertThat(shard.unassignedInfo().getReason(), equalTo(expectedUnassignedReason)); - final var lastAllocatedNodeId = shard.unassignedInfo().getLastAllocatedNodeId(); + assertThat(shard.unassignedInfo().reason(), equalTo(expectedUnassignedReason)); + final var lastAllocatedNodeId = shard.unassignedInfo().lastAllocatedNodeId(); if (lastAllocatedNodeId == null) { // restoring an index may change the number of shards/replicas so no guarantee that lastAllocatedNodeId is populated assertTrue(shardCountChanged); @@ -309,7 +309,7 @@ private void assertLastAllocatedNodeIdsAssigned( if (shardCountChanged == false) { assertNotNull(previousShardRoutingTable); assertThat( - shardRoutingTable.primaryShard().unassignedInfo().getLastAllocatedNodeId(), + shardRoutingTable.primaryShard().unassignedInfo().lastAllocatedNodeId(), equalTo(previousShardRoutingTable.primaryShard().currentNodeId()) ); } @@ -335,7 +335,7 @@ public void testIndexReopened() { ) .build(); for (ShardRouting shard : shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED)) { - assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.INDEX_REOPENED)); + assertThat(shard.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.INDEX_REOPENED)); } } @@ -366,7 +366,7 @@ public void testNewIndexRestored() { ) .build(); for (ShardRouting shard : shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED)) { - assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NEW_INDEX_RESTORED)); + assertThat(shard.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.NEW_INDEX_RESTORED)); } } @@ -471,7 +471,7 @@ public void testDanglingIndexImported() { ) .build(); for (ShardRouting shard : shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED)) { - assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.DANGLING_INDEX_IMPORTED)); + assertThat(shard.unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.DANGLING_INDEX_IMPORTED)); } } @@ -501,7 +501,7 @@ public void testReplicaAdded() { assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).size(), equalTo(1)); assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo(), notNullValue()); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getReason(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.REPLICA_ADDED) ); } @@ -551,11 +551,11 @@ public void testNodeLeave() { assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).size(), equalTo(1)); assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo(), notNullValue()); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getReason(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().reason(), 
equalTo(UnassignedInfo.Reason.NODE_LEFT) ); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().unassignedTimeMillis(), greaterThan(0L) ); } @@ -593,19 +593,19 @@ public void testFailedShard() { assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).size(), equalTo(1)); assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo(), notNullValue()); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getReason(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().reason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED) ); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getMessage(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().message(), equalTo("failed shard on node [" + shardToFail.currentNodeId() + "]: test fail") ); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getDetails(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().details(), equalTo("failed shard on node [" + shardToFail.currentNodeId() + "]: test fail") ); assertThat( - shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), + shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).get(0).unassignedInfo().unassignedTimeMillis(), greaterThan(0L) ); } @@ -768,14 +768,14 @@ private void checkRemainingDelayCalculation( final Settings indexSettings = Settings.builder() .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), indexLevelTimeoutSetting) .build(); - long delay = unassignedInfo.getRemainingDelay(baseTime, indexSettings, nodeShutdowns); + long delay = unassignedInfo.remainingDelay(baseTime, indexSettings, nodeShutdowns); assertThat(delay, equalTo(totalDelayNanos)); long delta1 = randomLongBetween(1, (totalDelayNanos - 1)); - delay = unassignedInfo.getRemainingDelay(baseTime + delta1, indexSettings, nodeShutdowns); + delay = unassignedInfo.remainingDelay(baseTime + delta1, indexSettings, nodeShutdowns); assertThat(delay, equalTo(totalDelayNanos - delta1)); - delay = unassignedInfo.getRemainingDelay(baseTime + totalDelayNanos, indexSettings, nodeShutdowns); + delay = unassignedInfo.remainingDelay(baseTime + totalDelayNanos, indexSettings, nodeShutdowns); assertThat(delay, equalTo(0L)); - delay = unassignedInfo.getRemainingDelay(baseTime + totalDelayNanos + randomIntBetween(1, 20), indexSettings, nodeShutdowns); + delay = unassignedInfo.remainingDelay(baseTime + totalDelayNanos + randomIntBetween(1, 20), indexSettings, nodeShutdowns); assertThat(delay, equalTo(0L)); } @@ -918,25 +918,25 @@ public void testSummaryContainsImportantFields() { var info = randomUnassignedInfo(randomBoolean() ? 
randomIdentifier() : null); var summary = info.shortSummary(); - assertThat("reason", summary, containsString("[reason=" + info.getReason() + ']')); + assertThat("reason", summary, containsString("[reason=" + info.reason() + ']')); assertThat( "delay", summary, - containsString("at[" + UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(info.getUnassignedTimeInMillis())) + ']') + containsString("at[" + UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(info.unassignedTimeMillis())) + ']') ); - if (info.getNumFailedAllocations() > 0) { - assertThat("failed_allocations", summary, containsString("failed_attempts[" + info.getNumFailedAllocations() + ']')); + if (info.failedAllocations() > 0) { + assertThat("failed_allocations", summary, containsString("failed_attempts[" + info.failedAllocations() + ']')); } - if (info.getFailedNodeIds().isEmpty() == false) { - assertThat("failed_nodes", summary, containsString("failed_nodes[" + info.getFailedNodeIds() + ']')); + if (info.failedNodeIds().isEmpty() == false) { + assertThat("failed_nodes", summary, containsString("failed_nodes[" + info.failedNodeIds() + ']')); } - assertThat("delayed", summary, containsString("delayed=" + info.isDelayed())); - if (info.getLastAllocatedNodeId() != null) { - assertThat("last_node", summary, containsString("last_node[" + info.getLastAllocatedNodeId() + ']')); + assertThat("delayed", summary, containsString("delayed=" + info.delayed())); + if (info.lastAllocatedNodeId() != null) { + assertThat("last_node", summary, containsString("last_node[" + info.lastAllocatedNodeId() + ']')); } - if (info.getMessage() != null) { - assertThat("details", summary, containsString("details[" + info.getMessage() + ']')); + if (info.message() != null) { + assertThat("details", summary, containsString("details[" + info.message() + ']')); } - assertThat("allocation_status", summary, containsString("allocation_status[" + info.getLastAllocationStatus().value() + ']')); + assertThat("allocation_status", summary, containsString("allocation_status[" + info.lastAllocationStatus().value() + ']')); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java index d28c1875100bb..e863aca526da7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java @@ -91,8 +91,8 @@ public void testSingleRetryOnIgnore() { routingTable = newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); assertEquals(routingTable.index("idx").shard(0).shard(0).state(), INITIALIZING); - assertEquals(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getNumFailedAllocations(), i + 1); - assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getMessage(), containsString("boom" + i)); + assertEquals(routingTable.index("idx").shard(0).shard(0).unassignedInfo().failedAllocations(), i + 1); + assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().message(), containsString("boom" + i)); } // now we go and check that we are actually stick to unassigned on the next failure ClusterState newState = applyShardFailure(clusterState, routingTable.index("idx").shard(0).shard(0), "boom"); @@ -100,9 +100,9 @@ public void testSingleRetryOnIgnore() { clusterState = newState; routingTable = 
newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); - assertEquals(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shard(0).unassignedInfo().failedAllocations(), retries); assertEquals(routingTable.index("idx").shard(0).shard(0).state(), UNASSIGNED); - assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getMessage(), containsString("boom")); + assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().message(), containsString("boom")); // manual resetting of retry count newState = strategy.reroute(clusterState, new AllocationCommands(), false, true, false, ActionListener.noop()).clusterState(); @@ -112,9 +112,9 @@ public void testSingleRetryOnIgnore() { clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertEquals(routingTable.index("idx").size(), 1); - assertEquals(0, routingTable.index("idx").shard(0).shard(0).unassignedInfo().getNumFailedAllocations()); + assertEquals(0, routingTable.index("idx").shard(0).shard(0).unassignedInfo().failedAllocations()); assertEquals(INITIALIZING, routingTable.index("idx").shard(0).shard(0).state()); - assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getMessage(), containsString("boom")); + assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().message(), containsString("boom")); // again fail it N-1 times for (int i = 0; i < retries - 1; i++) { @@ -123,9 +123,9 @@ public void testSingleRetryOnIgnore() { clusterState = newState; routingTable = newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); - assertEquals(i + 1, routingTable.index("idx").shard(0).shard(0).unassignedInfo().getNumFailedAllocations()); + assertEquals(i + 1, routingTable.index("idx").shard(0).shard(0).unassignedInfo().failedAllocations()); assertEquals(INITIALIZING, routingTable.index("idx").shard(0).shard(0).state()); - assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getMessage(), containsString("boom")); + assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().message(), containsString("boom")); } // now we go and check that we are actually stick to unassigned on the next failure @@ -134,9 +134,9 @@ public void testSingleRetryOnIgnore() { clusterState = newState; routingTable = newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); - assertEquals(retries, routingTable.index("idx").shard(0).shard(0).unassignedInfo().getNumFailedAllocations()); + assertEquals(retries, routingTable.index("idx").shard(0).shard(0).unassignedInfo().failedAllocations()); assertEquals(UNASSIGNED, routingTable.index("idx").shard(0).shard(0).state()); - assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().getMessage(), containsString("boom")); + assertThat(routingTable.index("idx").shard(0).shard(0).unassignedInfo().message(), containsString("boom")); } public void testFailedAllocation() { @@ -152,8 +152,8 @@ public void testFailedAllocation() { assertEquals(routingTable.index("idx").size(), 1); ShardRouting unassignedPrimary = routingTable.index("idx").shard(0).shard(0); assertEquals(unassignedPrimary.state(), INITIALIZING); - assertEquals(unassignedPrimary.unassignedInfo().getNumFailedAllocations(), i + 1); - assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom" + i)); + assertEquals(unassignedPrimary.unassignedInfo().failedAllocations(), i + 1); + 
assertThat(unassignedPrimary.unassignedInfo().message(), containsString("boom" + i)); // MaxRetryAllocationDecider#canForceAllocatePrimary should return YES decisions because canAllocate returns YES here assertEquals( Decision.Type.YES, @@ -168,9 +168,9 @@ public void testFailedAllocation() { routingTable = newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); ShardRouting unassignedPrimary = routingTable.index("idx").shard(0).shard(0); - assertEquals(unassignedPrimary.unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(unassignedPrimary.unassignedInfo().failedAllocations(), retries); assertEquals(unassignedPrimary.state(), UNASSIGNED); - assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom")); + assertThat(unassignedPrimary.unassignedInfo().message(), containsString("boom")); // MaxRetryAllocationDecider#canForceAllocatePrimary should return a NO decision because canAllocate returns NO here final var allocation = newRoutingAllocation(clusterState); allocation.debugDecision(true); @@ -211,9 +211,9 @@ public void testFailedAllocation() { // good we are initializing and we are maintaining failure information assertEquals(routingTable.index("idx").size(), 1); ShardRouting unassignedPrimary = routingTable.index("idx").shard(0).shard(0); - assertEquals(unassignedPrimary.unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(unassignedPrimary.unassignedInfo().failedAllocations(), retries); assertEquals(unassignedPrimary.state(), INITIALIZING); - assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom")); + assertThat(unassignedPrimary.unassignedInfo().message(), containsString("boom")); // bumped up the max retry count, so canForceAllocatePrimary should return a YES decision assertEquals( Decision.Type.YES, @@ -236,9 +236,9 @@ public void testFailedAllocation() { routingTable = newState.routingTable(); assertEquals(routingTable.index("idx").size(), 1); unassignedPrimary = routingTable.index("idx").shard(0).shard(0); - assertEquals(unassignedPrimary.unassignedInfo().getNumFailedAllocations(), 1); + assertEquals(unassignedPrimary.unassignedInfo().failedAllocations(), 1); assertEquals(unassignedPrimary.state(), UNASSIGNED); - assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("ZOOOMG")); + assertThat(unassignedPrimary.unassignedInfo().message(), containsString("ZOOOMG")); // Counter reset, so MaxRetryAllocationDecider#canForceAllocatePrimary should return a YES decision assertEquals( Decision.Type.YES, diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TrackFailedAllocationNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TrackFailedAllocationNodesTests.java index 438ec85c4b997..84eead8d51dc2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TrackFailedAllocationNodesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TrackFailedAllocationNodesTests.java @@ -59,17 +59,14 @@ public void testTrackFailedNodes() { List.of(new FailedShard(clusterState.routingTable().index("idx").shard(0).shard(0), null, null, randomBoolean())), List.of() ); - assertThat( - clusterState.routingTable().index("idx").shard(0).shard(0).unassignedInfo().getFailedNodeIds(), - equalTo(failedNodeIds) - ); + assertThat(clusterState.routingTable().index("idx").shard(0).shard(0).unassignedInfo().failedNodeIds(), equalTo(failedNodeIds)); } // reroute with retryFailed=true should 
discard the failedNodes assertThat(clusterState.routingTable().index("idx").shard(0).shard(0).state(), equalTo(ShardRoutingState.UNASSIGNED)); clusterState = allocationService.reroute(clusterState, new AllocationCommands(), false, true, false, ActionListener.noop()) .clusterState(); - assertThat(clusterState.routingTable().index("idx").shard(0).shard(0).unassignedInfo().getFailedNodeIds(), empty()); + assertThat(clusterState.routingTable().index("idx").shard(0).shard(0).unassignedInfo().failedNodeIds(), empty()); // do not track the failed nodes while shard is started clusterState = startInitializingShardsAndReroute(allocationService, clusterState); @@ -79,6 +76,6 @@ public void testTrackFailedNodes() { List.of(new FailedShard(clusterState.routingTable().index("idx").shard(0).primaryShard(), null, null, false)), List.of() ); - assertThat(clusterState.routingTable().index("idx").shard(0).primaryShard().unassignedInfo().getFailedNodeIds(), empty()); + assertThat(clusterState.routingTable().index("idx").shard(0).primaryShard().unassignedInfo().failedNodeIds(), empty()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index 2fc84c3f32e79..6c3a4157bb4ba 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -167,7 +167,7 @@ public void testIgnoresOutOfScopePrimaries() { .replicaShards() .get(0) .unassignedInfo() - .getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 1 : 2 + .lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 1 : 2 ), new ShardId(index, 1), new ShardAssignment(Set.of("node-0", "node-1"), 2, 0, 0) @@ -198,7 +198,7 @@ public void testIgnoresOutOfScopeReplicas() { Set.of("node-0"), 2, 1, - originalReplicaShard.unassignedInfo().getLastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 0 : 1 + originalReplicaShard.unassignedInfo().lastAllocationStatus() == UnassignedInfo.AllocationStatus.DECIDERS_NO ? 
0 : 1 ), new ShardId(index, 1), new ShardAssignment(Set.of("node-0", "node-1"), 2, 0, 0) @@ -1301,20 +1301,20 @@ private static ShardRouting mutateAllocationStatus(ShardRouting shardRouting) { var unassignedInfo = shardRouting.unassignedInfo(); return shardRouting.updateUnassigned( new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), randomFrom( UnassignedInfo.AllocationStatus.DECIDERS_NO, UnassignedInfo.AllocationStatus.NO_ATTEMPT, UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED ), - unassignedInfo.getFailedNodeIds(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.failedNodeIds(), + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource() ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index 4ce5e78e308b2..0de27aea5b08f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -132,19 +132,19 @@ public void testFailsNewPrimariesIfNoDataNodes() { final var shardRouting = unassigned.next(); if (shardRouting.primary() && shardRouting.shardId().id() == 1) { final var unassignedInfo = shardRouting.unassignedInfo(); - assertThat(unassignedInfo.getLastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.NO_ATTEMPT)); + assertThat(unassignedInfo.lastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.NO_ATTEMPT)); unassigned.updateUnassigned( new UnassignedInfo( - unassignedInfo.getReason(), - unassignedInfo.getMessage(), - unassignedInfo.getFailure(), - unassignedInfo.getNumFailedAllocations(), - unassignedInfo.getUnassignedTimeInNanos(), - unassignedInfo.getUnassignedTimeInMillis(), - unassignedInfo.isDelayed(), + unassignedInfo.reason(), + unassignedInfo.message(), + unassignedInfo.failure(), + unassignedInfo.failedAllocations(), + unassignedInfo.unassignedTimeNanos(), + unassignedInfo.unassignedTimeMillis(), + unassignedInfo.delayed(), UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED, - unassignedInfo.getFailedNodeIds(), - unassignedInfo.getLastAllocatedNodeId() + unassignedInfo.failedNodeIds(), + unassignedInfo.lastAllocatedNodeId() ), shardRouting.recoverySource(), new RoutingChangesObserver.DelegatingRoutingChangesObserver() @@ -164,7 +164,7 @@ public void testFailsNewPrimariesIfNoDataNodes() { for (ShardRouting shardRouting : routingAllocation.routingNodes().unassigned()) { assertTrue(shardRouting.toString(), shardRouting.unassigned()); assertThat( - shardRouting.unassignedInfo().getLastAllocationStatus(), + shardRouting.unassignedInfo().lastAllocationStatus(), equalTo( shardRouting.primary() && shardRouting.shardId().id() == 1 ? 
UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED @@ -190,7 +190,7 @@ public void testFailsNewPrimariesIfNoDataNodes() { for (ShardRouting shardRouting : routingAllocation.routingNodes().unassigned()) { assertTrue(shardRouting.toString(), shardRouting.unassigned()); assertThat( - shardRouting.unassignedInfo().getLastAllocationStatus(), + shardRouting.unassignedInfo().lastAllocationStatus(), equalTo( // we only update primaries, and only if currently NO_ATTEMPT shardRouting.primary() @@ -677,7 +677,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing .replicaShards() .stream() .allMatch( - shardRouting -> shardRouting.unassignedInfo().getLastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT + shardRouting -> shardRouting.unassignedInfo().lastAllocationStatus() == UnassignedInfo.AllocationStatus.NO_ATTEMPT ) ); } @@ -724,7 +724,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing nonYesDecision == Decision.NO ? UnassignedInfo.AllocationStatus.DECIDERS_NO : UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED, - redState.routingTable().shardRoutingTable("index-0", 0).primaryShard().unassignedInfo().getLastAllocationStatus() + redState.routingTable().shardRoutingTable("index-0", 0).primaryShard().unassignedInfo().lastAllocationStatus() ); assignPrimary.set(true); @@ -733,7 +733,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing startInitializingShardsAndReroute(allocationService, redState) ); for (final var shardRouting : yellowState.routingTable().shardRoutingTable("index-0", 0).replicaShards()) { - assertEquals(UnassignedInfo.AllocationStatus.NO_ATTEMPT, shardRouting.unassignedInfo().getLastAllocationStatus()); + assertEquals(UnassignedInfo.AllocationStatus.NO_ATTEMPT, shardRouting.unassignedInfo().lastAllocationStatus()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index 53ac77de6fc88..e5b3393723ab1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -100,7 +100,7 @@ public void testGatewayAllocatorStillFetching() { var shardRouting = routingTable.shardRoutingTable("test-index", 0).primaryShard(); assertFalse(shardRouting.assignedToNode()); assertThat( - shardRouting.unassignedInfo().getLastAllocationStatus(), + shardRouting.unassignedInfo().lastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.FETCHING_SHARD_DATA) ); } @@ -111,7 +111,7 @@ public void testGatewayAllocatorDoesNothing() { testAllocate((allocation, unassignedAllocationHandler) -> {}, routingTable -> { var shardRouting = routingTable.shardRoutingTable("test-index", 0).primaryShard(); assertTrue(shardRouting.assignedToNode());// assigned by a followup reconciliation - assertThat(shardRouting.unassignedInfo().getLastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.NO_ATTEMPT)); + assertThat(shardRouting.unassignedInfo().lastAllocationStatus(), equalTo(UnassignedInfo.AllocationStatus.NO_ATTEMPT)); }); } @@ -328,7 +328,7 @@ protected long currentNanoTime() { var unassigned = reconciledState.getRoutingNodes().unassigned(); assertThat(unassigned.size(), equalTo(1)); var unassignedShard = 
unassigned.iterator().next(); - assertThat(unassignedShard.unassignedInfo().isDelayed(), equalTo(true)); + assertThat(unassignedShard.unassignedInfo().delayed(), equalTo(true)); } finally { clusterService.close(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 716e7c80a6cde..d5cf73cacb782 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -1190,13 +1190,13 @@ private void doTestDiskThresholdWithSnapshotShardSizes(boolean testMaxHeadroom) assertThat( shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).stream() .map(ShardRouting::unassignedInfo) - .allMatch(unassignedInfo -> Reason.NEW_INDEX_RESTORED.equals(unassignedInfo.getReason())), + .allMatch(unassignedInfo -> Reason.NEW_INDEX_RESTORED.equals(unassignedInfo.reason())), is(true) ); assertThat( shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).stream() .map(ShardRouting::unassignedInfo) - .allMatch(unassignedInfo -> AllocationStatus.NO_ATTEMPT.equals(unassignedInfo.getLastAllocationStatus())), + .allMatch(unassignedInfo -> AllocationStatus.NO_ATTEMPT.equals(unassignedInfo.lastAllocationStatus())), is(true) ); assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).size(), equalTo(1)); @@ -1218,7 +1218,7 @@ private void doTestDiskThresholdWithSnapshotShardSizes(boolean testMaxHeadroom) assertThat( shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).stream() .map(ShardRouting::unassignedInfo) - .allMatch(unassignedInfo -> AllocationStatus.FETCHING_SHARD_DATA.equals(unassignedInfo.getLastAllocationStatus())), + .allMatch(unassignedInfo -> AllocationStatus.FETCHING_SHARD_DATA.equals(unassignedInfo.lastAllocationStatus())), is(true) ); assertThat(shardsWithState(clusterState.getRoutingNodes(), UNASSIGNED).size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java index ea156ee48a656..ab14345cb53c4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java @@ -111,16 +111,16 @@ public void testCanAllocatePrimaryExistingInRestoreInProgress() { UnassignedInfo currentInfo = primary.unassignedInfo(); UnassignedInfo newInfo = new UnassignedInfo( - currentInfo.getReason(), - currentInfo.getMessage(), + currentInfo.reason(), + currentInfo.message(), new IOException("i/o failure"), - currentInfo.getNumFailedAllocations(), - currentInfo.getUnassignedTimeInNanos(), - currentInfo.getUnassignedTimeInMillis(), - currentInfo.isDelayed(), - currentInfo.getLastAllocationStatus(), - currentInfo.getFailedNodeIds(), - currentInfo.getLastAllocatedNodeId() + currentInfo.failedAllocations(), + currentInfo.unassignedTimeNanos(), + currentInfo.unassignedTimeMillis(), + currentInfo.delayed(), + currentInfo.lastAllocationStatus(), + currentInfo.failedNodeIds(), + currentInfo.lastAllocatedNodeId() ); primary = primary.updateUnassigned(newInfo, 
primary.recoverySource()); diff --git a/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java b/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java new file mode 100644 index 0000000000000..6ffbfd65dc457 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/network/ThreadWatchdogTests.java @@ -0,0 +1,305 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.network; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.core.TimeValue.timeValueMillis; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + +public class ThreadWatchdogTests extends ESTestCase { + + public void testSimpleActivityTracking() throws InterruptedException { + final var watchdog = new ThreadWatchdog(); + final var barrier = new CyclicBarrier(2); + final var threadName = "watched-thread"; + final var thread = new Thread(() -> { + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + + assertEquals(0L, activityTracker.get()); + if (randomBoolean()) { + // ensure overflow is no problem + activityTracker.set(Long.MAX_VALUE - randomFrom(1, 3, 5)); + } + + safeAwait(barrier); + // step 1: thread is idle + safeAwait(barrier); + + activityTracker.startActivity(); + + safeAwait(barrier); + // step 2: thread is active + safeAwait(barrier); + + for (int i = between(1, 10); i > 0; i--) { + activityTracker.stopActivity(); + activityTracker.startActivity(); + } + + safeAwait(barrier); + // step 3: thread still active, but made progress + safeAwait(barrier); + + activityTracker.stopActivity(); + + safeAwait(barrier); + // step 4: thread is idle again + safeAwait(barrier); + + }, threadName); + thread.start(); + + safeAwait(barrier); + + // step 1: thread is idle + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(), watchdog.getStuckThreadNames()); + + safeAwait(barrier); + safeAwait(barrier); + + // step 2: thread is active + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); // just to check it's still reported as stuck + + safeAwait(barrier); + safeAwait(barrier); + + // step 3: thread still active, but made progress + assertEquals(List.of(), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); + assertEquals(List.of(threadName), watchdog.getStuckThreadNames()); // just to check it's still reported as stuck + + safeAwait(barrier); 
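+        // between these two rendezvous the watched thread calls stopActivity(), so step 4 sees it idle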
+        safeAwait(barrier);
+
+        // step 4: thread is idle again
+        assertEquals(List.of(), watchdog.getStuckThreadNames());
+        assertEquals(List.of(), watchdog.getStuckThreadNames());
+
+        safeAwait(barrier);
+
+        thread.join();
+    }
+
+    public void testMultipleBlockedThreads() throws InterruptedException {
+        final var threadNames = randomList(2, 10, ESTestCase::randomIdentifier);
+
+        final var watchdog = new ThreadWatchdog();
+        final var barrier = new CyclicBarrier(threadNames.size() + 1);
+        final var threads = new Thread[threadNames.size()];
+        for (int i = 0; i < threads.length; i++) {
+            threads[i] = new Thread(() -> {
+                safeAwait(barrier);
+                final var activityTracker = watchdog.getActivityTrackerForCurrentThread();
+                activityTracker.startActivity();
+                safeAwait(barrier);
+                // wait for main test thread
+                safeAwait(barrier);
+                activityTracker.stopActivity();
+            }, threadNames.get(i));
+            threads[i].start();
+        }
+
+        safeAwait(barrier);
+        safeAwait(barrier);
+
+        try {
+            assertEquals(List.of(), watchdog.getStuckThreadNames());
+            threadNames.sort(Comparator.naturalOrder()); // stuck threads are sorted by name
+            assertEquals(threadNames, watchdog.getStuckThreadNames());
+            assertEquals(threadNames, watchdog.getStuckThreadNames()); // just to check they're all still reported as stuck
+        } finally {
+            safeAwait(barrier);
+            for (final var thread : threads) {
+                thread.join();
+            }
+        }
+    }
+
+    public void testConcurrency() throws Exception {
+        final var keepGoing = new AtomicBoolean(true);
+        final var watchdog = new ThreadWatchdog();
+        final var threads = new Thread[between(1, 5)];
+        final var semaphoresByThreadName = new HashMap<String, Semaphore>();
+        final var warmUpLatches = new CountDownLatch[threads.length];
+        try {
+            for (int i = 0; i < threads.length; i++) {
+                final var threadName = "watched-thread-" + i;
+                final var semaphore = new Semaphore(1);
+                final var warmUpLatch = new CountDownLatch(20);
+                warmUpLatches[i] = warmUpLatch;
+                semaphoresByThreadName.put(threadName, semaphore);
+                threads[i] = new Thread(() -> {
+                    final var activityTracker = watchdog.getActivityTrackerForCurrentThread();
+                    while (keepGoing.get()) {
+                        activityTracker.startActivity();
+                        try {
+                            safeAcquire(semaphore);
+                            Thread.yield();
+                            semaphore.release();
+                            Thread.yield();
+                        } finally {
+                            activityTracker.stopActivity();
+                            warmUpLatch.countDown();
+                        }
+                    }
+                }, threadName);
+                threads[i].start();
+            }
+
+            for (final var warmUpLatch : warmUpLatches) {
+                safeAwait(warmUpLatch);
+            }
+
+            final var threadToBlock = randomFrom(semaphoresByThreadName.keySet());
+            final var semaphore = semaphoresByThreadName.get(threadToBlock);
+            safeAcquire(semaphore);
+            assertBusy(() -> assertThat(watchdog.getStuckThreadNames(), hasItem(threadToBlock)));
+            semaphore.release();
+            assertBusy(() -> assertThat(watchdog.getStuckThreadNames(), not(hasItem(threadToBlock))));
+        } finally {
+            keepGoing.set(false);
+            for (final var thread : threads) {
+                thread.join();
+            }
+        }
+    }
+
+    /**
+     * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. Thus we fix the expected logger
+     * name in this string constant rather than using {@code ThreadWatchdog.class.getCanonicalName()}.
+ */ + private static final String LOGGER_NAME = "org.elasticsearch.common.network.ThreadWatchdog"; + + public void testLoggingAndScheduling() { + final var watchdog = new ThreadWatchdog(); + final var activityTracker = watchdog.getActivityTrackerForCurrentThread(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + + final var settings = Settings.builder(); + final var lifecycle = new Lifecycle(); + assertTrue(lifecycle.moveToStarted()); + + final long checkIntervalMillis; + if (randomBoolean()) { + checkIntervalMillis = ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.get(Settings.EMPTY).millis(); + } else { + checkIntervalMillis = between(1, 100000); + settings.put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), timeValueMillis(checkIntervalMillis)); + } + + final long quietTimeMillis; + if (randomBoolean()) { + quietTimeMillis = ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.get(Settings.EMPTY).millis(); + } else { + quietTimeMillis = between(1, 100000); + settings.put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_QUIET_TIME.getKey(), timeValueMillis(quietTimeMillis)); + } + + watchdog.run(settings.build(), deterministicTaskQueue.getThreadPool(), lifecycle); + + for (int i = 0; i < 3; i++) { + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + } + + activityTracker.startActivity(); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.SeenEventExpectation( + "stuck threads logging", + LOGGER_NAME, + Level.WARN, + Strings.format( + "the following threads are active but did not make progress in the preceding [%s]: [%s]", + TimeValue.timeValueMillis(checkIntervalMillis), + Thread.currentThread().getName() + ) + ), + new MockLog.SeenEventExpectation( + "thread dump", + LOGGER_NAME, + Level.WARN, + "hot threads dump due to active threads not making progress (gzip compressed*base64-encoded*" + ) + ); + assertAdvanceTime(deterministicTaskQueue, Math.max(quietTimeMillis, checkIntervalMillis)); + activityTracker.stopActivity(); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", LOGGER_NAME, Level.WARN, "*") + ); + assertAdvanceTime(deterministicTaskQueue, checkIntervalMillis); + deterministicTaskQueue.scheduleNow(lifecycle::moveToStopped); + deterministicTaskQueue.runAllTasksInTimeOrder(); // ensures that the rescheduling stops + } + + public void testDisableWithZeroInterval() { + final var watchdog = new ThreadWatchdog(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + final var lifecycle = new Lifecycle(); + assertTrue(lifecycle.moveToStarted()); + + watchdog.run( + Settings.builder() + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), randomFrom(TimeValue.ZERO, TimeValue.MINUS_ONE)) + .build(), + deterministicTaskQueue.getThreadPool(), + lifecycle + ); + assertFalse(deterministicTaskQueue.hasAnyTasks()); + + watchdog.run( + 
Settings.builder().put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), timeValueMillis(between(1, 100000))).build(), + deterministicTaskQueue.getThreadPool(), + lifecycle + ); + assertTrue(deterministicTaskQueue.hasDeferredTasks()); + lifecycle.moveToStopped(); + deterministicTaskQueue.runAllTasksInTimeOrder(); // ensures that the rescheduling stops + } + + private static void assertAdvanceTime(DeterministicTaskQueue deterministicTaskQueue, long expectedMillis) { + final var currentTimeMillis = deterministicTaskQueue.getCurrentTimeMillis(); + deterministicTaskQueue.advanceTime(); + assertEquals(expectedMillis, deterministicTaskQueue.getCurrentTimeMillis() - currentTimeMillis); + } +} diff --git a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index f6b310abac770..a74a00792d701 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -286,7 +286,7 @@ public void testDontAllocateOnNoOrThrottleForceAllocationDecision() { List ignored = allocation.routingNodes().unassigned().ignored(); assertEquals(ignored.size(), 1); assertEquals( - ignored.get(0).unassignedInfo().getLastAllocationStatus(), + ignored.get(0).unassignedInfo().lastAllocationStatus(), forceDecisionNo ? AllocationStatus.DECIDERS_NO : AllocationStatus.DECIDERS_THROTTLED ); assertTrue(shardsWithState(allocation.routingNodes(), ShardRoutingState.INITIALIZING).isEmpty()); @@ -314,7 +314,7 @@ public void testDontForceAllocateOnThrottleDecision() { assertThat(allocation.routingNodesChanged(), equalTo(true)); List ignored = allocation.routingNodes().unassigned().ignored(); assertEquals(ignored.size(), 1); - assertEquals(ignored.get(0).unassignedInfo().getLastAllocationStatus(), AllocationStatus.DECIDERS_THROTTLED); + assertEquals(ignored.get(0).unassignedInfo().lastAllocationStatus(), AllocationStatus.DECIDERS_THROTTLED); assertTrue(shardsWithState(allocation.routingNodes(), ShardRoutingState.INITIALIZING).isEmpty()); } @@ -454,7 +454,7 @@ public void testRestoreDoesNotAssignIfShardSizeNotAvailable() { assertThat(allocation.routingNodesChanged(), equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); ShardRouting ignoredRouting = allocation.routingNodes().unassigned().ignored().get(0); - assertThat(ignoredRouting.unassignedInfo().getLastAllocationStatus(), equalTo(AllocationStatus.FETCHING_SHARD_DATA)); + assertThat(ignoredRouting.unassignedInfo().lastAllocationStatus(), equalTo(AllocationStatus.FETCHING_SHARD_DATA)); assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index e1cba6f1746e4..9582037975318 100644 --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -254,8 +254,8 @@ private void runNoopRetentionLeaseTest(boolean isRelevantShard) { List unassignedShards = shardsWithState(allocation.routingNodes(), ShardRoutingState.UNASSIGNED); assertThat(unassignedShards, hasSize(1)); assertThat(unassignedShards.get(0).shardId(), equalTo(shardId)); - assertThat(unassignedShards.get(0).unassignedInfo().getNumFailedAllocations(), equalTo(0)); - 
assertThat(unassignedShards.get(0).unassignedInfo().getFailedNodeIds(), equalTo(failedNodeIds)); + assertThat(unassignedShards.get(0).unassignedInfo().failedAllocations(), equalTo(0)); + assertThat(unassignedShards.get(0).unassignedInfo().failedNodeIds(), equalTo(failedNodeIds)); } else { assertThat(allocation.routingNodesChanged(), equalTo(false)); assertThat(shardsWithState(allocation.routingNodes(), ShardRoutingState.UNASSIGNED).size(), equalTo(0)); diff --git a/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java b/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java index fd73a8c9f8f52..caddc7d5ea5af 100644 --- a/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/LogsIndexModeTests.java @@ -10,12 +10,13 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.MapperServiceTestCase; -import org.hamcrest.Matchers; +import org.elasticsearch.test.ESTestCase; -public class LogsIndexModeTests extends MapperServiceTestCase { +import static org.hamcrest.Matchers.equalTo; + +public class LogsIndexModeTests extends ESTestCase { public void testLogsIndexModeSetting() { - assertThat(IndexSettings.MODE.get(buildSettings()), Matchers.equalTo(IndexMode.LOGS)); + assertThat(IndexSettings.MODE.get(buildSettings()), equalTo(IndexMode.LOGS)); } public void testSortField() { @@ -24,8 +25,10 @@ public void testSortField() { .put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), "agent_id") .build(); final IndexMetadata metadata = IndexSettingsTests.newIndexMeta("test", sortSettings); + assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGS)); final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); - assertThat("agent_id", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); + assertThat(settings.getMode(), equalTo(IndexMode.LOGS)); + assertThat("agent_id", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); } public void testSortMode() { @@ -35,9 +38,11 @@ public void testSortMode() { .put(IndexSortConfig.INDEX_SORT_MODE_SETTING.getKey(), "max") .build(); final IndexMetadata metadata = IndexSettingsTests.newIndexMeta("test", sortSettings); + assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGS)); final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); - assertThat("agent_id", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); - assertThat("max", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_MODE_SETTING.getKey()))); + assertThat(settings.getMode(), equalTo(IndexMode.LOGS)); + assertThat("agent_id", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); + assertThat("max", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_MODE_SETTING.getKey()))); } public void testSortOrder() { @@ -47,9 +52,11 @@ public void testSortOrder() { .put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), "desc") .build(); final IndexMetadata metadata = IndexSettingsTests.newIndexMeta("test", sortSettings); + assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGS)); final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); - assertThat("agent_id", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); - assertThat("desc", 
Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey()))); + assertThat(settings.getMode(), equalTo(IndexMode.LOGS)); + assertThat("agent_id", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); + assertThat("desc", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey()))); } public void testSortMissing() { @@ -59,9 +66,11 @@ public void testSortMissing() { .put(IndexSortConfig.INDEX_SORT_MISSING_SETTING.getKey(), "_last") .build(); final IndexMetadata metadata = IndexSettingsTests.newIndexMeta("test", sortSettings); + assertThat(metadata.getIndexMode(), equalTo(IndexMode.LOGS)); final IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); - assertThat("agent_id", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); - assertThat("_last", Matchers.equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_MISSING_SETTING.getKey()))); + assertThat(settings.getMode(), equalTo(IndexMode.LOGS)); + assertThat("agent_id", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey()))); + assertThat("_last", equalTo(getIndexSetting(settings, IndexSortConfig.INDEX_SORT_MISSING_SETTING.getKey()))); } private Settings buildSettings() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index ffb3cc1943bff..3c687f1792d0d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -19,15 +19,18 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.util.Accountable; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.script.ScriptCompiler; @@ -107,6 +110,13 @@ private CodecService createCodecService() throws IOException { Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER ); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) {} + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) {} + }); MapperService service = new MapperService( () -> TransportVersion.current(), settings, @@ -117,6 +127,7 @@ private CodecService createCodecService() throws IOException { () -> null, settings.getMode().idFieldMapperWithoutFieldData(), ScriptCompiler.NONE, + bitsetFilterCache::getBitSetProducer, MapperMetrics.NOOP ); return new CodecService(service, BigArrays.NON_RECYCLING_INSTANCE); diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java 
b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index 74657842488b5..525fa31673494 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; @@ -61,6 +62,28 @@ public class PerFieldMapperCodecTests extends ESTestCase { } """; + private static final String MAPPING_3 = """ + { + "_data_stream_timestamp": { + "enabled": true + }, + "properties": { + "@timestamp": { + "type": "date" + }, + "hostname": { + "type": "keyword" + }, + "response_size": { + "type": "long" + }, + "message": { + "type": "text" + } + } + } + """; + public void testUseBloomFilter() throws IOException { PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); @@ -103,13 +126,13 @@ public void testDoNotUseES87TSDBEncodingForTimestampFieldNonTimeSeriesIndex() th } public void testEnableES87TSDBCodec() throws IOException { - PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, IndexMode.TIME_SERIES, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDisableES87TSDBCodec() throws IOException { - PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, IndexMode.TIME_SERIES, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } @@ -144,31 +167,37 @@ private PerFieldFormatSupplier createFormatSupplier(boolean timestampField, bool } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { - PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, IndexMode.TIME_SERIES, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeDisabledCodecDisabled() throws IOException { - PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, IndexMode.STANDARD, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeAndCodecEnabled() throws IOException { - PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_2); 
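+    // (Note: createFormatSupplier now takes the IndexMode enum rather than a boolean
+    // time-series flag, so the same helper covers TIME_SERIES, STANDARD and the new
+    // LOGS mode exercised by testLogsIndexMode further down.)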
+ PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, IndexMode.TIME_SERIES, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); } - private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) - throws IOException { + public void testLogsIndexMode() throws IOException { + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, IndexMode.LOGS, MAPPING_3); + assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); + assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("hostname")), is(true)); + assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("response_size")), is(true)); + } + + private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, IndexMode mode, String mapping) throws IOException { Settings.Builder settings = Settings.builder(); - if (timeSeries) { - settings.put(IndexSettings.MODE.getKey(), "time_series"); + settings.put(IndexSettings.MODE.getKey(), mode); + if (mode == IndexMode.TIME_SERIES) { settings.put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "field"); } settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 8f6565cc5da94..886b0aa9e425d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -176,7 +176,9 @@ private static ObjectMapper createObjectMapper(String name) { } private static NestedObjectMapper createNestedObjectMapper(String name) { - return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false, false)); + return new NestedObjectMapper.Builder(name, IndexVersion.current(), query -> { throw new UnsupportedOperationException(); }).build( + MapperBuilderContext.root(false, false) + ); } private static MappingLookup createMappingLookup( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index abe8e820acae8..aa22a345c5cec 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.util.Accountable; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; @@ -17,6 +18,8 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.script.ScriptService; @@ -43,6 +46,13 @@ private static MappingParser createMappingParser(Settings settings, IndexVersion IndexAnalyzers indexAnalyzers = 
createIndexAnalyzers(); SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) {} + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) {} + }); Supplier mappingParserContextSupplier = () -> new MappingParserContext( similarityService::getSimilarity, type -> mapperRegistry.getMapperParser(type, indexSettings.getIndexVersionCreated()), @@ -55,7 +65,8 @@ private static MappingParser createMappingParser(Settings settings, IndexVersion scriptService, indexAnalyzers, indexSettings, - indexSettings.getMode().idFieldMapperWithoutFieldData() + indexSettings.getMode().idFieldMapperWithoutFieldData(), + bitsetFilterCache::getBitSetProducer ); Map metadataMapperParsers = mapperRegistry.getMetadataMapperParsers( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java index 80ba37d8066b2..5c2fa6e89b0c6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java @@ -64,7 +64,9 @@ public void testMultiLevelParents() throws IOException { } private static NestedObjectMapper buildMapper(String name) { - return new NestedObjectMapper.Builder(name, IndexVersion.current()).build(MapperBuilderContext.root(false, false)); + return new NestedObjectMapper.Builder(name, IndexVersion.current(), query -> { throw new UnsupportedOperationException(); }).build( + MapperBuilderContext.root(false, false) + ); } public void testAllParentFilters() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 25e4ccdf4d3a9..412077b659b98 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; @@ -1500,12 +1501,12 @@ public void testIndexTemplatesMergeIncludes() throws IOException { } public void testMergeNested() { - NestedObjectMapper firstMapper = new NestedObjectMapper.Builder("nested1", IndexVersion.current()).includeInParent(true) - .includeInRoot(true) - .build(MapperBuilderContext.root(false, false)); - NestedObjectMapper secondMapper = new NestedObjectMapper.Builder("nested1", IndexVersion.current()).includeInParent(false) - .includeInRoot(true) - .build(MapperBuilderContext.root(false, false)); + NestedObjectMapper firstMapper = new NestedObjectMapper.Builder("nested1", IndexVersion.current(), query -> { + throw new UnsupportedOperationException(); + }).includeInParent(true).includeInRoot(true).build(MapperBuilderContext.root(false, false)); + NestedObjectMapper secondMapper = new NestedObjectMapper.Builder("nested1", 
IndexVersion.current(), query -> { + throw new UnsupportedOperationException(); + }).includeInParent(false).includeInRoot(true).build(MapperBuilderContext.root(false, false)); MapperException e = expectThrows( MapperException.class, @@ -1533,6 +1534,39 @@ public void testWithoutMappers() throws IOException { assertThat(object.withoutMappers().toString(), equalTo(shallowObject.toString())); } + public void testNestedMapperFilters() throws Exception { + DocumentMapper docMapper = createDocumentMapper(mapping(b -> { + b.startObject("nested1"); + { + b.field("type", "nested"); + b.startObject("properties"); + { + b.startObject("field1").field("type", "text").endObject(); + b.startObject("sub_nested"); + { + b.field("type", "nested"); + b.startObject("properties"); + { + b.startObject("field2").field("type", "text").endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + })); + + assertThat(docMapper.mappers().nestedLookup().getNestedMappers().size(), equalTo(2)); + assertThat(docMapper.mappers().nestedLookup().getNestedMappers().get("nested1"), instanceOf(NestedObjectMapper.class)); + NestedObjectMapper mapper1 = docMapper.mappers().nestedLookup().getNestedMappers().get("nested1"); + assertThat(mapper1.parentTypeFilter(), equalTo(Queries.newNonNestedFilter(IndexVersion.current()))); + + NestedObjectMapper mapper2 = docMapper.mappers().nestedLookup().getNestedMappers().get("nested1.sub_nested"); + assertThat(mapper2.parentTypeFilter(), equalTo(mapper1.nestedTypeFilter())); + } + private NestedObjectMapper createNestedObjectMapperWithAllParametersSet(CheckedConsumer propertiesBuilder) throws IOException { DocumentMapper mapper = createDocumentMapper(mapping(b -> { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index b1b7f80ba865f..0ec1997ae652e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -277,7 +277,10 @@ private static TestMapper fromMapping( ScriptCompiler.NONE, mapperService.getIndexAnalyzers(), mapperService.getIndexSettings(), - mapperService.getIndexSettings().getMode().idFieldMapperWithoutFieldData() + mapperService.getIndexSettings().getMode().idFieldMapperWithoutFieldData(), + query -> { + throw new UnsupportedOperationException(); + } ); if (fromDynamicTemplate) { pc = pc.createDynamicTemplateContext(null); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 802a18645eab6..d0350c1d92a83 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -243,6 +243,13 @@ public void testSyntheticSourceInTimeSeries() throws IOException { assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } + public void testSyntheticSourceWithLogsIndexMode() throws IOException { + XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); + DocumentMapper mapper = createLogsModeDocumentMapper(mapping); + assertTrue(mapper.sourceMapper().isSynthetic()); + assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + } + public void testSupportsNonDefaultParameterValues() 
throws IOException { Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java index 2b704a25e2232..035466d93ab06 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java @@ -97,7 +97,10 @@ public void testMultiFieldWithinMultiField() throws IOException { ScriptCompiler.NONE, mapperService.getIndexAnalyzers(), mapperService.getIndexSettings(), - ProvidedIdFieldMapper.NO_FIELD_DATA + ProvidedIdFieldMapper.NO_FIELD_DATA, + query -> { + throw new UnsupportedOperationException(); + } ); TextFieldMapper.PARSER.parse("some-field", fieldNode, olderContext); @@ -128,7 +131,10 @@ public void testMultiFieldWithinMultiField() throws IOException { ScriptCompiler.NONE, mapperService.getIndexAnalyzers(), mapperService.getIndexSettings(), - ProvidedIdFieldMapper.NO_FIELD_DATA + ProvidedIdFieldMapper.NO_FIELD_DATA, + query -> { + throw new UnsupportedOperationException(); + } ); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 6d8a22e7850e4..9cd1df700a618 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -548,7 +548,10 @@ private static MapperService createMapperService(IndexSettings indexSettings, Ma ScriptCompiler.NONE, indexAnalyzers, indexSettings, - indexSettings.getMode().buildIdFieldMapper(() -> true) + indexSettings.getMode().buildIdFieldMapper(() -> true), + query -> { + throw new UnsupportedOperationException(); + } ) ); when(mapperService.isMultiField(anyString())).then( diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index 59acb227385f6..4d58471f4817a 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -644,8 +644,8 @@ public void testIsParallelCollectionSupportedForResults() { ToLongFunction fieldCardinality = name -> -1; for (var resultsType : SearchService.ResultsType.values()) { switch (resultsType) { - case NONE, FETCH -> assertFalse( - "NONE and FETCH phases do not support parallel collection.", + case NONE, RANK_FEATURE, FETCH -> assertFalse( + "NONE, RANK_FEATURE, and FETCH phases do not support parallel collection.", DefaultSearchContext.isParallelCollectionSupportedForResults( resultsType, searchSourceBuilderOrNull, diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index d2c6c55634ec6..5c175f792d399 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import 
org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.SetOnce; @@ -27,6 +29,7 @@ import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; +import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -92,6 +95,7 @@ import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; +import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; @@ -102,12 +106,26 @@ import org.elasticsearch.search.query.NonCountingTermQuery; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankShardResult; +import org.elasticsearch.search.rank.TestRankBuilder; +import org.elasticsearch.search.rank.TestRankDoc; +import org.elasticsearch.search.rank.TestRankShardResult; +import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.feature.RankFeatureResult; +import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; +import org.elasticsearch.search.rank.feature.RankFeatureShardResult; import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.tasks.TaskCancelHelper; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -115,8 +133,10 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -136,8 +156,8 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.indices.cluster.AbstractIndicesClusterStateServiceTestCase.awaitIndexShardCloseAsyncTasks; import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED; +import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.search.SearchService.QUERY_PHASE_PARALLEL_COLLECTION_ENABLED; import static 
org.elasticsearch.search.SearchService.SEARCH_WORKER_THREADS_ENABLED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -371,7 +391,7 @@ public void testSearchWhileIndexDeleted() throws InterruptedException { -1, null ), - new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), + new SearchShardTask(123L, "", "", "", null, emptyMap()), result.delegateFailure((l, r) -> { r.incRef(); l.onResponse(r); @@ -387,7 +407,7 @@ public void testSearchWhileIndexDeleted() throws InterruptedException { null/* not a scroll */ ); PlainActionFuture listener = new PlainActionFuture<>(); - service.executeFetchPhase(req, new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), listener); + service.executeFetchPhase(req, new SearchShardTask(123L, "", "", "", null, emptyMap()), listener); listener.get(); if (useScroll) { // have to free context since this test does not remove the index from IndicesService. @@ -422,6 +442,712 @@ public void testSearchWhileIndexDeleted() throws InterruptedException { assertEquals(0, totalStats.getFetchCurrent()); } + public void testRankFeaturePhaseSearchPhases() throws InterruptedException, ExecutionException { + final String indexName = "index"; + final String rankFeatureFieldName = "field"; + final String searchFieldName = "search_field"; + final String searchFieldValue = "some_value"; + final String fetchFieldName = "fetch_field"; + final String fetchFieldValue = "fetch_value"; + + final int minDocs = 3; + final int maxDocs = 10; + int numDocs = between(minDocs, maxDocs); + createIndex(indexName); + // index some documents + for (int i = 0; i < numDocs; i++) { + prepareIndex(indexName).setId(String.valueOf(i)) + .setSource( + rankFeatureFieldName, + "aardvark_" + i, + searchFieldName, + searchFieldValue, + fetchFieldName, + fetchFieldValue + "_" + i + ) + .get(); + } + indicesAdmin().prepareRefresh(indexName).get(); + + final SearchService service = getInstanceFromNode(SearchService.class); + + final IndicesService indicesService = getInstanceFromNode(IndicesService.class); + final IndexService indexService = indicesService.indexServiceSafe(resolveIndex(indexName)); + final IndexShard indexShard = indexService.getShard(0); + SearchShardTask searchTask = new SearchShardTask(123L, "", "", "", null, emptyMap()); + + // create a SearchRequest that will return all documents and defines a TestRankBuilder with shard-level only operations + SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true) + .source( + new SearchSourceBuilder().query(new TermQueryBuilder(searchFieldName, searchFieldValue)) + .size(DEFAULT_SIZE) + .fetchField(fetchFieldName) + .rankBuilder( + // here we override only the shard-level contexts + new TestRankBuilder(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return new QueryPhaseRankShardContext(queries, from) { + + @Override + public int rankWindowSize() { + return DEFAULT_RANK_WINDOW_SIZE; + } + + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + // we know we have just 1 query, so return all the docs from it + return new TestRankShardResult( + Arrays.stream(rankResults.get(0).scoreDocs) + .map(x -> new TestRankDoc(x.doc, x.score, x.shardIndex)) + .limit(rankWindowSize()) + .toArray(TestRankDoc[]::new) + ); + } + }; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new 
RankFeaturePhaseRankShardContext(rankFeatureFieldName) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + SearchHit hit = hits.getHits()[i]; + rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId); + rankFeatureDocs[i].featureData(hit.getFields().get(rankFeatureFieldName).getValue()); + rankFeatureDocs[i].score = (numDocs - i) + randomFloat(); + rankFeatureDocs[i].rank = i + 1; + } + return new RankFeatureShardResult(rankFeatureDocs); + } + }; + } + } + ) + ); + + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + searchRequest, + indexShard.shardId(), + 0, + 1, + AliasFilter.EMPTY, + 1.0f, + -1, + null + ); + QuerySearchResult queryResult = null; + RankFeatureResult rankResult = null; + try { + // Execute the query phase and store the result in a SearchPhaseResult container using a PlainActionFuture + PlainActionFuture queryPhaseResults = new PlainActionFuture<>(); + service.executeQueryPhase(request, searchTask, queryPhaseResults); + queryResult = (QuerySearchResult) queryPhaseResults.get(); + + // these are the matched docs from the query phase + final TestRankDoc[] queryRankDocs = ((TestRankShardResult) queryResult.getRankShardResult()).testRankDocs; + + // assume that we have cut down to these from the coordinator node as the top-docs to run the rank feature phase upon + List topRankWindowSizeDocs = randomNonEmptySubsetOf(Arrays.stream(queryRankDocs).map(x -> x.doc).toList()); + + // now we create a RankFeatureShardRequest to extract feature info for the top-docs above + RankFeatureShardRequest rankFeatureShardRequest = new RankFeatureShardRequest( + OriginalIndices.NONE, + queryResult.getContextId(), // use the context from the query phase + request, + topRankWindowSizeDocs + ); + PlainActionFuture rankPhaseResults = new PlainActionFuture<>(); + service.executeRankFeaturePhase(rankFeatureShardRequest, searchTask, rankPhaseResults); + rankResult = rankPhaseResults.get(); + + assertNotNull(rankResult); + assertNotNull(rankResult.rankFeatureResult()); + RankFeatureShardResult rankFeatureShardResult = rankResult.rankFeatureResult().shardResult(); + assertNotNull(rankFeatureShardResult); + + List sortedRankWindowDocs = topRankWindowSizeDocs.stream().sorted().toList(); + assertEquals(sortedRankWindowDocs.size(), rankFeatureShardResult.rankFeatureDocs.length); + for (int i = 0; i < sortedRankWindowDocs.size(); i++) { + assertEquals((long) sortedRankWindowDocs.get(i), rankFeatureShardResult.rankFeatureDocs[i].doc); + assertEquals(rankFeatureShardResult.rankFeatureDocs[i].featureData, "aardvark_" + sortedRankWindowDocs.get(i)); + } + + List globalTopKResults = randomNonEmptySubsetOf( + Arrays.stream(rankFeatureShardResult.rankFeatureDocs).map(x -> x.doc).toList() + ); + + // finally let's create a fetch request to bring back fetch info for the top results + ShardFetchSearchRequest fetchRequest = new ShardFetchSearchRequest( + OriginalIndices.NONE, + rankResult.getContextId(), + request, + globalTopKResults, + null, + null, + rankResult.getRescoreDocIds(), + null + ); + + // execute fetch phase and perform any validations once we retrieve the response + // the difference in how we do assertions here is needed because once the transport service sends back the response + // it decrements the reference to the FetchSearchResult (through the 
ActionListener#respondAndRelease) and sets hits to null + service.executeFetchPhase(fetchRequest, searchTask, new ActionListener<>() { + @Override + public void onResponse(FetchSearchResult fetchSearchResult) { + assertNotNull(fetchSearchResult); + assertNotNull(fetchSearchResult.hits()); + + int totalHits = fetchSearchResult.hits().getHits().length; + assertEquals(globalTopKResults.size(), totalHits); + for (int i = 0; i < totalHits; i++) { + // rank and score are set by the SearchPhaseController#merge so no need to validate that here + SearchHit hit = fetchSearchResult.hits().getAt(i); + assertNotNull(hit.getFields().get(fetchFieldName)); + assertEquals(hit.getFields().get(fetchFieldName).getValue(), fetchFieldValue + "_" + hit.docId()); + } + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("No failure should have been raised", e); + } + }); + } catch (Exception ex) { + if (queryResult != null) { + if (queryResult.hasReferences()) { + queryResult.decRef(); + } + service.freeReaderContext(queryResult.getContextId()); + } + if (rankResult != null && rankResult.hasReferences()) { + rankResult.decRef(); + } + throw ex; + } + } + + public void testRankFeaturePhaseUsingClient() { + final String indexName = "index"; + final String rankFeatureFieldName = "field"; + final String searchFieldName = "search_field"; + final String searchFieldValue = "some_value"; + final String fetchFieldName = "fetch_field"; + final String fetchFieldValue = "fetch_value"; + + final int minDocs = 4; + final int maxDocs = 10; + int numDocs = between(minDocs, maxDocs); + createIndex(indexName); + // index some documents + for (int i = 0; i < numDocs; i++) { + prepareIndex(indexName).setId(String.valueOf(i)) + .setSource( + rankFeatureFieldName, + "aardvark_" + i, + searchFieldName, + searchFieldValue, + fetchFieldName, + fetchFieldValue + "_" + i + ) + .get(); + } + indicesAdmin().prepareRefresh(indexName).get(); + + ElasticsearchAssertions.assertResponse( + client().prepareSearch(indexName) + .setSource( + new SearchSourceBuilder().query(new TermQueryBuilder(searchFieldName, searchFieldValue)) + .size(2) + .from(2) + .fetchField(fetchFieldName) + .rankBuilder( + // here we override only the shard-level contexts + new TestRankBuilder(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + + // no need for more than one queries + @Override + public boolean isCompoundBuilder() { + return false; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return new RankFeaturePhaseRankCoordinatorContext(size, from, DEFAULT_RANK_WINDOW_SIZE) { + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { + float[] scores = new float[featureDocs.length]; + for (int i = 0; i < featureDocs.length; i++) { + scores[i] = featureDocs[i].score; + } + scoreListener.onResponse(scores); + } + }; + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return new QueryPhaseRankCoordinatorContext(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + List rankDocs = new ArrayList<>(); + for (int i = 0; i < querySearchResults.size(); i++) { + QuerySearchResult querySearchResult = querySearchResults.get(i); + TestRankShardResult shardResult = (TestRankShardResult) querySearchResult + .getRankShardResult(); + for (TestRankDoc 
trd : shardResult.testRankDocs) { + trd.shardIndex = i; + rankDocs.add(trd); + } + } + rankDocs.sort(Comparator.comparing((TestRankDoc doc) -> doc.score).reversed()); + TestRankDoc[] topResults = rankDocs.stream().limit(rankWindowSize).toArray(TestRankDoc[]::new); + topDocStats.fetchHits = topResults.length; + return topResults; + } + }; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return new QueryPhaseRankShardContext(queries, from) { + + @Override + public int rankWindowSize() { + return DEFAULT_RANK_WINDOW_SIZE; + } + + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + // we know we have just 1 query, so return all the docs from it + return new TestRankShardResult( + Arrays.stream(rankResults.get(0).scoreDocs) + .map(x -> new TestRankDoc(x.doc, x.score, x.shardIndex)) + .limit(rankWindowSize()) + .toArray(TestRankDoc[]::new) + ); + } + }; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new RankFeaturePhaseRankShardContext(rankFeatureFieldName) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + SearchHit hit = hits.getHits()[i]; + rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId); + rankFeatureDocs[i].featureData(hit.getFields().get(rankFeatureFieldName).getValue()); + rankFeatureDocs[i].score = randomFloat(); + rankFeatureDocs[i].rank = i + 1; + } + return new RankFeatureShardResult(rankFeatureDocs); + } + }; + } + } + ) + ), + (response) -> { + SearchHits hits = response.getHits(); + assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getHits().length, 2); + int index = 0; + for (SearchHit hit : hits.getHits()) { + assertEquals(hit.getRank(), 3 + index); + assertTrue(hit.getScore() >= 0); + assertEquals(hit.getFields().get(fetchFieldName).getValue(), fetchFieldValue + "_" + hit.docId()); + index++; + } + } + ); + } + + public void testRankFeaturePhaseExceptionOnCoordinatingNode() { + final String indexName = "index"; + final String rankFeatureFieldName = "field"; + final String searchFieldName = "search_field"; + final String searchFieldValue = "some_value"; + final String fetchFieldName = "fetch_field"; + final String fetchFieldValue = "fetch_value"; + + final int minDocs = 3; + final int maxDocs = 10; + int numDocs = between(minDocs, maxDocs); + createIndex(indexName); + // index some documents + for (int i = 0; i < numDocs; i++) { + prepareIndex(indexName).setId(String.valueOf(i)) + .setSource( + rankFeatureFieldName, + "aardvark_" + i, + searchFieldName, + searchFieldValue, + fetchFieldName, + fetchFieldValue + "_" + i + ) + .get(); + } + indicesAdmin().prepareRefresh(indexName).get(); + + expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(indexName) + .setSource( + new SearchSourceBuilder().query(new TermQueryBuilder(searchFieldName, searchFieldValue)) + .size(2) + .from(2) + .fetchField(fetchFieldName) + .rankBuilder(new TestRankBuilder(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + + // no need for more than one queries + @Override + public boolean isCompoundBuilder() { + return false; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return new RankFeaturePhaseRankCoordinatorContext(size, from, 
DEFAULT_RANK_WINDOW_SIZE) { + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { + throw new IllegalStateException("should have failed earlier"); + } + }; + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return new QueryPhaseRankCoordinatorContext(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + throw new UnsupportedOperationException("simulated failure"); + } + }; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return new QueryPhaseRankShardContext(queries, from) { + + @Override + public int rankWindowSize() { + return DEFAULT_RANK_WINDOW_SIZE; + } + + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + // we know we have just 1 query, so return all the docs from it + return new TestRankShardResult( + Arrays.stream(rankResults.get(0).scoreDocs) + .map(x -> new TestRankDoc(x.doc, x.score, x.shardIndex)) + .limit(rankWindowSize()) + .toArray(TestRankDoc[]::new) + ); + } + }; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new RankFeaturePhaseRankShardContext(rankFeatureFieldName) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length]; + for (int i = 0; i < hits.getHits().length; i++) { + SearchHit hit = hits.getHits()[i]; + rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId); + rankFeatureDocs[i].featureData(hit.getFields().get(rankFeatureFieldName).getValue()); + rankFeatureDocs[i].score = randomFloat(); + rankFeatureDocs[i].rank = i + 1; + } + return new RankFeatureShardResult(rankFeatureDocs); + } + }; + } + }) + ) + .get() + ); + } + + public void testRankFeaturePhaseExceptionAllShardFail() { + final String indexName = "index"; + final String rankFeatureFieldName = "field"; + final String searchFieldName = "search_field"; + final String searchFieldValue = "some_value"; + final String fetchFieldName = "fetch_field"; + final String fetchFieldValue = "fetch_value"; + + final int minDocs = 3; + final int maxDocs = 10; + int numDocs = between(minDocs, maxDocs); + createIndex(indexName); + // index some documents + for (int i = 0; i < numDocs; i++) { + prepareIndex(indexName).setId(String.valueOf(i)) + .setSource( + rankFeatureFieldName, + "aardvark_" + i, + searchFieldName, + searchFieldValue, + fetchFieldName, + fetchFieldValue + "_" + i + ) + .get(); + } + indicesAdmin().prepareRefresh(indexName).get(); + + expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(indexName) + .setAllowPartialSearchResults(true) + .setSource( + new SearchSourceBuilder().query(new TermQueryBuilder(searchFieldName, searchFieldValue)) + .fetchField(fetchFieldName) + .rankBuilder( + // here we override only the shard-level contexts + new TestRankBuilder(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + + // no need for more than one queries + @Override + public boolean isCompoundBuilder() { + return false; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return new RankFeaturePhaseRankCoordinatorContext(size, from, DEFAULT_RANK_WINDOW_SIZE) { + @Override + protected void 
computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { + float[] scores = new float[featureDocs.length]; + for (int i = 0; i < featureDocs.length; i++) { + scores[i] = featureDocs[i].score; + } + scoreListener.onResponse(scores); + } + }; + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return new QueryPhaseRankCoordinatorContext(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + List rankDocs = new ArrayList<>(); + for (int i = 0; i < querySearchResults.size(); i++) { + QuerySearchResult querySearchResult = querySearchResults.get(i); + TestRankShardResult shardResult = (TestRankShardResult) querySearchResult + .getRankShardResult(); + for (TestRankDoc trd : shardResult.testRankDocs) { + trd.shardIndex = i; + rankDocs.add(trd); + } + } + rankDocs.sort(Comparator.comparing((TestRankDoc doc) -> doc.score).reversed()); + TestRankDoc[] topResults = rankDocs.stream().limit(rankWindowSize).toArray(TestRankDoc[]::new); + topDocStats.fetchHits = topResults.length; + return topResults; + } + }; + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return new QueryPhaseRankShardContext(queries, from) { + + @Override + public int rankWindowSize() { + return DEFAULT_RANK_WINDOW_SIZE; + } + + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + // we know we have just 1 query, so return all the docs from it + return new TestRankShardResult( + Arrays.stream(rankResults.get(0).scoreDocs) + .map(x -> new TestRankDoc(x.doc, x.score, x.shardIndex)) + .limit(rankWindowSize()) + .toArray(TestRankDoc[]::new) + ); + } + }; + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new RankFeaturePhaseRankShardContext(rankFeatureFieldName) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + throw new UnsupportedOperationException("simulated failure"); + } + }; + } + } + ) + ) + .get() + ); + } + + public void testRankFeaturePhaseExceptionOneShardFails() { + // if we have only one shard and it fails, it will fallback to context.onPhaseFailure which will eventually clean up all contexts. + // in this test we want to make sure that even if one shard (of many) fails during the RankFeaturePhase, then the appropriate + // context will have been cleaned up. 
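+        // Toy schematic of the hand-offs all of these rank tests drive (pseudocode,
+        // not the ES API):
+        //
+        //     shardTop  = queryPhase(shard, rankWindowSize)           // per shard
+        //     features  = rankFeaturePhase(shardTop, coordinatorCut)  // per shard, may throw
+        //     scores    = computeScores(features)                     // coordinator
+        //     hits      = fetchPhase(globalTopK(scores))              // per shard
+        //
+        // A failure in the per-shard rank feature step (simulated below for shard 0)
+        // must still release that shard's reader context while the surviving shard's
+        // results flow through to the fetch phase.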
+        final String indexName = "index";
+        final String rankFeatureFieldName = "field";
+        final String searchFieldName = "search_field";
+        final String searchFieldValue = "some_value";
+        final String fetchFieldName = "fetch_field";
+        final String fetchFieldValue = "fetch_value";
+
+        final int minDocs = 3;
+        final int maxDocs = 10;
+        int numDocs = between(minDocs, maxDocs);
+        createIndex(indexName, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2).build());
+        // index some documents
+        for (int i = 0; i < numDocs; i++) {
+            prepareIndex(indexName).setId(String.valueOf(i))
+                .setSource(
+                    rankFeatureFieldName,
+                    "aardvark_" + i,
+                    searchFieldName,
+                    searchFieldValue,
+                    fetchFieldName,
+                    fetchFieldValue + "_" + i
+                )
+                .get();
+        }
+        indicesAdmin().prepareRefresh(indexName).get();
+
+        assertResponse(
+            client().prepareSearch(indexName)
+                .setAllowPartialSearchResults(true)
+                .setSource(
+                    new SearchSourceBuilder().query(new TermQueryBuilder(searchFieldName, searchFieldValue))
+                        .fetchField(fetchFieldName)
+                        .rankBuilder(
+                            // here we override only the shard-level contexts
+                            new TestRankBuilder(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) {
+
+                                // no need for more than one query
+                                @Override
+                                public boolean isCompoundBuilder() {
+                                    return false;
+                                }
+
+                                @Override
+                                public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) {
+                                    return new RankFeaturePhaseRankCoordinatorContext(size, from, DEFAULT_RANK_WINDOW_SIZE) {
+                                        @Override
+                                        protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener) {
+                                            float[] scores = new float[featureDocs.length];
+                                            for (int i = 0; i < featureDocs.length; i++) {
+                                                scores[i] = featureDocs[i].score;
+                                            }
+                                            scoreListener.onResponse(scores);
+                                        }
+                                    };
+                                }
+
+                                @Override
+                                public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) {
+                                    return new QueryPhaseRankCoordinatorContext(RankBuilder.DEFAULT_RANK_WINDOW_SIZE) {
+                                        @Override
+                                        public ScoreDoc[] rankQueryPhaseResults(
+                                            List<QuerySearchResult> querySearchResults,
+                                            SearchPhaseController.TopDocsStats topDocStats
+                                        ) {
+                                            List<TestRankDoc> rankDocs = new ArrayList<>();
+                                            for (int i = 0; i < querySearchResults.size(); i++) {
+                                                QuerySearchResult querySearchResult = querySearchResults.get(i);
+                                                TestRankShardResult shardResult = (TestRankShardResult) querySearchResult
+                                                    .getRankShardResult();
+                                                for (TestRankDoc trd : shardResult.testRankDocs) {
+                                                    trd.shardIndex = i;
+                                                    rankDocs.add(trd);
+                                                }
+                                            }
+                                            rankDocs.sort(Comparator.comparing((TestRankDoc doc) -> doc.score).reversed());
+                                            TestRankDoc[] topResults = rankDocs.stream().limit(rankWindowSize).toArray(TestRankDoc[]::new);
+                                            topDocStats.fetchHits = topResults.length;
+                                            return topResults;
+                                        }
+                                    };
+                                }
+
+                                @Override
+                                public QueryPhaseRankShardContext buildQueryPhaseShardContext(List<Query> queries, int from) {
+                                    return new QueryPhaseRankShardContext(queries, from) {
+
+                                        @Override
+                                        public int rankWindowSize() {
+                                            return DEFAULT_RANK_WINDOW_SIZE;
+                                        }
+
+                                        @Override
+                                        public RankShardResult combineQueryPhaseResults(List<TopDocs> rankResults) {
+                                            // we know we have just 1 query, so return all the docs from it
+                                            return new TestRankShardResult(
+                                                Arrays.stream(rankResults.get(0).scoreDocs)
+                                                    .map(x -> new TestRankDoc(x.doc, x.score, x.shardIndex))
+                                                    .limit(rankWindowSize())
+                                                    .toArray(TestRankDoc[]::new)
+                                            );
+                                        }
+                                    };
+                                }
+
+                                @Override
+                                public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() {
+                                    return new RankFeaturePhaseRankShardContext(rankFeatureFieldName) {
+                                        @Override
+                                        public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) {
+                                            if (shardId == 0) {
+                                                throw new UnsupportedOperationException("simulated failure");
+                                            } else {
+                                                RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length];
+                                                for (int i = 0; i < hits.getHits().length; i++) {
+                                                    SearchHit hit = hits.getHits()[i];
+                                                    rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId);
+                                                    rankFeatureDocs[i].featureData(hit.getFields().get(rankFeatureFieldName).getValue());
+                                                    rankFeatureDocs[i].score = randomFloat();
+                                                    rankFeatureDocs[i].rank = i + 1;
+                                                }
+                                                return new RankFeatureShardResult(rankFeatureDocs);
+                                            }
+                                        }
+                                    };
+                                }
+                            }
+                        )
+                ),
+            (searchResponse) -> {
+                assertEquals(1, searchResponse.getSuccessfulShards());
+                assertEquals("simulated failure", searchResponse.getShardFailures()[0].getCause().getMessage());
+                assertNotEquals(0, searchResponse.getHits().getHits().length);
+                for (SearchHit hit : searchResponse.getHits().getHits()) {
+                    assertEquals(fetchFieldValue + "_" + hit.getId(), hit.getFields().get(fetchFieldName).getValue());
+                    assertEquals(1, hit.getShard().getShardId().id());
+                }
+            }
+        );
+    }
+
     public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws ExecutionException, InterruptedException {
         createIndex("index");
         prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
@@ -457,7 +1183,7 @@ public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws Executi
                 -1,
                 null
             ),
-            new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()),
+            new SearchShardTask(123L, "", "", "", null, emptyMap()),
             result
         );
@@ -694,7 +1420,7 @@ public void testMaxScriptFieldsSearch() throws IOException {
         for (int i = 0; i < maxScriptFields; i++) {
             searchSourceBuilder.scriptField(
                 "field" + i,
-                new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())
+                new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, emptyMap())
             );
         }
         final ShardSearchRequest request = new ShardSearchRequest(
@@ -723,7 +1449,7 @@ public void testMaxScriptFieldsSearch() throws IOException {
         }
         searchSourceBuilder.scriptField(
             "anotherScriptField",
-            new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())
+            new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, emptyMap())
         );
         IllegalArgumentException ex = expectThrows(
             IllegalArgumentException.class,
@@ -752,7 +1478,7 @@ public void testIgnoreScriptfieldIfSizeZero() throws IOException {
         searchRequest.source(searchSourceBuilder);
         searchSourceBuilder.scriptField(
             "field" + 0,
-            new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())
+            new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, emptyMap())
         );
         searchSourceBuilder.size(0);
         final ShardSearchRequest request = new ShardSearchRequest(
@@ -1036,7 +1762,7 @@ public void testCanMatch() throws Exception {
         );
         CountDownLatch latch = new CountDownLatch(1);
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         // Because the foo field used in alias filter is unmapped the term query builder rewrite can resolve to a match no docs query,
         // without acquiring a searcher and that means the wrapper is not called
         assertEquals(5, numWrapInvocations.get());
@@ -1330,7 +2056,7 @@ public void testMatchNoDocsEmptyResponse() throws InterruptedException {
             0,
             null
         );
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         {
             CountDownLatch latch = new CountDownLatch(1);
@@ -1705,7 +2431,7 @@ public void testWaitOnRefresh() throws ExecutionException, InterruptedException
         final DocWriteResponse response = prepareIndex("index").setSource("id", "1").get();
         assertEquals(RestStatus.CREATED, response.status());
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         ShardSearchRequest request = new ShardSearchRequest(
             OriginalIndices.NONE,
             searchRequest,
@@ -1740,7 +2466,7 @@ public void testWaitOnRefreshFailsWithRefreshesDisabled() {
         final DocWriteResponse response = prepareIndex("index").setSource("id", "1").get();
         assertEquals(RestStatus.CREATED, response.status());
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         PlainActionFuture<SearchPhaseResult> future = new PlainActionFuture<>();
         ShardSearchRequest request = new ShardSearchRequest(
             OriginalIndices.NONE,
@@ -1778,7 +2504,7 @@ public void testWaitOnRefreshFailsIfCheckpointNotIndexed() {
         final DocWriteResponse response = prepareIndex("index").setSource("id", "1").get();
         assertEquals(RestStatus.CREATED, response.status());
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         PlainActionFuture<SearchPhaseResult> future = new PlainActionFuture<>();
         ShardSearchRequest request = new ShardSearchRequest(
             OriginalIndices.NONE,
@@ -1815,7 +2541,7 @@ public void testWaitOnRefreshTimeout() {
         final DocWriteResponse response = prepareIndex("index").setSource("id", "1").get();
         assertEquals(RestStatus.CREATED, response.status());
-        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap());
+        SearchShardTask task = new SearchShardTask(123L, "", "", "", null, emptyMap());
         PlainActionFuture<SearchPhaseResult> future = new PlainActionFuture<>();
         ShardSearchRequest request = new ShardSearchRequest(
             OriginalIndices.NONE,
@@ -1901,7 +2627,7 @@ public void testDfsQueryPhaseRewrite() {
         PlainActionFuture<QuerySearchResult> plainActionFuture = new PlainActionFuture<>();
         service.executeQueryPhase(
             new QuerySearchRequest(null, context.id(), request, new AggregatedDfs(Map.of(), Map.of(), 10)),
-            new SearchShardTask(42L, "", "", "", null, Collections.emptyMap()),
+            new SearchShardTask(42L, "", "", "", null, emptyMap()),
             plainActionFuture
         );
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
index 09c13a96da704..4ec2e5ab49cd3 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
@@ -912,6 +912,8 @@ protected List objectMappers() {
     );
 
     public static NestedObjectMapper nestedObject(String path) {
-        return new NestedObjectMapper.Builder(path, IndexVersion.current()).build(MapperBuilderContext.root(false, false));
+        return new NestedObjectMapper.Builder(path, IndexVersion.current(), query -> { throw new UnsupportedOperationException(); }).build(
+            MapperBuilderContext.root(false, false)
+        );
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java
index 3a7460c05ca87..a086225e140ac 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java
@@ -88,7 +88,7 @@ public void testDocValueFetcher() throws IOException {
             processor.setNextReader(context);
             for (int doc = 0; doc < context.reader().maxDoc(); doc++) {
                 SearchHit searchHit = SearchHit.unpooled(doc + context.docBase);
-                processor.process(new FetchSubPhase.HitContext(searchHit, context, doc, Map.of(), Source.empty(null)));
+                processor.process(new FetchSubPhase.HitContext(searchHit, context, doc, Map.of(), Source.empty(null), null));
                 assertNotNull(searchHit.getFields().get("field"));
             }
         }
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java
index 2b8bf0dad65fe..8778d02dc44f9 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java
@@ -197,7 +197,7 @@ private HitContext hitExecuteMultiple(
         MemoryIndex index = new MemoryIndex();
         LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
         Source source = sourceBuilder == null ? Source.empty(null) : Source.fromBytes(BytesReference.bytes(sourceBuilder));
-        HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, Map.of(), source);
+        HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, Map.of(), source, null);
 
         FetchSourcePhase phase = new FetchSourcePhase();
         FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext);
diff --git a/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java
new file mode 100644
index 0000000000000..9a6442e129d5a
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java
@@ -0,0 +1,416 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.rank;
+
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TotalHits;
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
+import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext;
+import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
+import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext;
+import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext;
+import org.elasticsearch.search.rank.feature.RankFeatureDoc;
+import org.elasticsearch.search.rank.feature.RankFeatureResult;
+import org.elasticsearch.search.rank.feature.RankFeatureShardPhase;
+import org.elasticsearch.search.rank.feature.RankFeatureShardRequest;
+import org.elasticsearch.search.rank.feature.RankFeatureShardResult;
+import org.elasticsearch.tasks.TaskCancelledException;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.TestSearchContext;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class RankFeatureShardPhaseTests extends ESTestCase {
+
+    private SearchContext getSearchContext() {
+        return new TestSearchContext((SearchExecutionContext) null) {
+
+            private FetchSearchResult fetchResult;
+            private RankFeatureResult rankFeatureResult;
+            private FetchFieldsContext fetchFieldsContext;
+            private StoredFieldsContext storedFieldsContext;
+
+            @Override
+            public FetchSearchResult fetchResult() {
+                return fetchResult;
+            }
+
+            @Override
+            public void addFetchResult() {
+                this.fetchResult = new FetchSearchResult();
+                this.addReleasable(fetchResult::decRef);
+            }
+
+            @Override
+            public RankFeatureResult rankFeatureResult() {
+                return rankFeatureResult;
+            }
+
+            @Override
+            public void addRankFeatureResult() {
+                this.rankFeatureResult = new RankFeatureResult();
+                this.addReleasable(rankFeatureResult::decRef);
+            }
+
+            @Override
+            public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) {
+                this.fetchFieldsContext = fetchFieldsContext;
+                return this;
+            }
+
+            @Override
+            public FetchFieldsContext fetchFieldsContext() {
+                return fetchFieldsContext;
+            }
+
+            @Override
+            public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
+                this.storedFieldsContext = storedFieldsContext;
+                return this;
+            }
+
+            @Override
+            public StoredFieldsContext storedFieldsContext() {
+                return storedFieldsContext;
+            }
+
+            @Override
+            public boolean isCancelled() {
+                return false;
+            }
+        };
+    }
+
+    private RankBuilder getRankBuilder(final String field) {
+        return new RankBuilder(DEFAULT_RANK_WINDOW_SIZE) {
+            @Override
+            protected void doWriteTo(StreamOutput out) throws IOException {
+                // no-op
+            }
+
+            @Override
+            protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+                // no-op
+            }
+
+            @Override
+            public boolean isCompoundBuilder() {
+                return false;
+            }
+
+            @Override
+            public Explanation explainHit(Explanation baseExplanation, RankDoc scoreDoc, List<String> queryNames) {
+                // no-op
+                return baseExplanation;
+            }
+
+            // no work to be done on the query phase
+            @Override
+            public QueryPhaseRankShardContext buildQueryPhaseShardContext(List<Query> queries, int from) {
+                return null;
+            }
+
+            // no work to be done on the query phase
+            @Override
+            public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) {
+                return null;
+            }
+
+            @Override
+            public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() {
+                return new RankFeaturePhaseRankShardContext(field) {
+                    @Override
+                    public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) {
+                        RankFeatureDoc[] rankFeatureDocs = new RankFeatureDoc[hits.getHits().length];
+                        for (int i = 0; i < hits.getHits().length; i++) {
+                            SearchHit hit = hits.getHits()[i];
+                            rankFeatureDocs[i] = new RankFeatureDoc(hit.docId(), hit.getScore(), shardId);
+                            rankFeatureDocs[i].featureData(hit.getFields().get(field).getValue());
+                            rankFeatureDocs[i].rank = i + 1;
+                        }
+                        return new RankFeatureShardResult(rankFeatureDocs);
+                    }
+                };
+            }
+
+            // no work to be done on the coordinator node for the rank feature phase
+            @Override
+            public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) {
+                return null;
+            }
+
+            @Override
+            protected boolean doEquals(RankBuilder other) {
+                return false;
+            }
+
+            @Override
+            protected int doHashCode() {
+                return 0;
+            }
+
+            @Override
+            public String getWriteableName() {
+                return "rank_builder_rank_feature_shard_phase_enabled";
+            }
+
+            @Override
+            public TransportVersion getMinimalSupportedVersion() {
+                return TransportVersions.RANK_FEATURE_PHASE_ADDED;
+            }
+        };
+    }
+
+    public void testPrepareForFetch() {
+
+        final String fieldName = "some_field";
+        int numDocs = randomIntBetween(10, 30);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(getRankBuilder(fieldName));
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            when(searchContext.isCancelled()).thenReturn(false);
+            when(searchContext.request()).thenReturn(searchRequest);
+
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            rankFeatureShardPhase.prepareForFetch(searchContext, request);
+
+            assertNotNull(searchContext.fetchFieldsContext());
+            assertEquals(searchContext.fetchFieldsContext().fields().size(), 1);
+            assertEquals(searchContext.fetchFieldsContext().fields().get(0).field, fieldName);
+            assertNotNull(searchContext.storedFieldsContext());
+            assertNull(searchContext.storedFieldsContext().fieldNames());
+            assertFalse(searchContext.storedFieldsContext().fetchFields());
+            assertNotNull(searchContext.fetchResult());
+        }
+    }
+
+    public void testPrepareForFetchNoRankFeatureContext() {
+        int numDocs = randomIntBetween(10, 30);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(null);
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            when(searchContext.isCancelled()).thenReturn(false);
+            when(searchContext.request()).thenReturn(searchRequest);
+
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            rankFeatureShardPhase.prepareForFetch(searchContext, request);
+
+            assertNull(searchContext.fetchFieldsContext());
+            assertNull(searchContext.fetchResult());
+        }
+    }
+
+    public void testPrepareForFetchWhileTaskIsCancelled() {
+
+        final String fieldName = "some_field";
+        int numDocs = randomIntBetween(10, 30);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(getRankBuilder(fieldName));
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            when(searchContext.isCancelled()).thenReturn(true);
+            when(searchContext.request()).thenReturn(searchRequest);
+
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            expectThrows(TaskCancelledException.class, () -> rankFeatureShardPhase.prepareForFetch(searchContext, request));
+        }
+    }
+
+    public void testProcessFetch() {
+        final String fieldName = "some_field";
+        int numDocs = randomIntBetween(15, 30);
+        Map<Integer, String> expectedFieldData = Map.of(4, "doc_4_aardvark", 9, "doc_9_aardvark", numDocs - 1, "last_doc_aardvark");
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(getRankBuilder(fieldName));
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        SearchShardTarget shardTarget = new SearchShardTarget(
+            "node_id",
+            new ShardId(new Index("some_index", UUID.randomUUID().toString()), 0),
+            null
+        );
+        SearchHits searchHits = null;
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            searchContext.addFetchResult();
+            SearchHit[] hits = new SearchHit[3];
+            hits[0] = SearchHit.unpooled(4);
+            hits[0].setDocumentField(fieldName, new DocumentField(fieldName, Collections.singletonList(expectedFieldData.get(4))));
+
+            hits[1] = SearchHit.unpooled(9);
+            hits[1].setDocumentField(fieldName, new DocumentField(fieldName, Collections.singletonList(expectedFieldData.get(9))));
+
+            hits[2] = SearchHit.unpooled(numDocs - 1);
+            hits[2].setDocumentField(
+                fieldName,
+                new DocumentField(fieldName, Collections.singletonList(expectedFieldData.get(numDocs - 1)))
+            );
+            searchHits = SearchHits.unpooled(hits, new TotalHits(3, TotalHits.Relation.EQUAL_TO), 1.0f);
+            searchContext.fetchResult().shardResult(searchHits, null);
+            when(searchContext.isCancelled()).thenReturn(false);
+            when(searchContext.request()).thenReturn(searchRequest);
+            when(searchContext.shardTarget()).thenReturn(shardTarget);
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            // this is called as part of the search context initialization
+            // with the ResultsType.RANK_FEATURE type
+            searchContext.addRankFeatureResult();
+            rankFeatureShardPhase.processFetch(searchContext);
+
+            assertNotNull(searchContext.rankFeatureResult());
+            assertNotNull(searchContext.rankFeatureResult().rankFeatureResult());
+            for (RankFeatureDoc rankFeatureDoc : searchContext.rankFeatureResult().rankFeatureResult().shardResult().rankFeatureDocs) {
+                assertTrue(expectedFieldData.containsKey(rankFeatureDoc.doc));
+                assertEquals(rankFeatureDoc.featureData, expectedFieldData.get(rankFeatureDoc.doc));
+            }
+        } finally {
+            if (searchHits != null) {
+                searchHits.decRef();
+            }
+        }
+    }
+
+    public void testProcessFetchEmptyHits() {
+        final String fieldName = "some_field";
+        int numDocs = randomIntBetween(10, 30);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(getRankBuilder(fieldName));
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        SearchShardTarget shardTarget = new SearchShardTarget(
+            "node_id",
+            new ShardId(new Index("some_index", UUID.randomUUID().toString()), 0),
+            null
+        );
+
+        SearchHits searchHits = null;
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            searchContext.addFetchResult();
+            SearchHit[] hits = new SearchHit[0];
+            searchHits = SearchHits.unpooled(hits, new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f);
+            searchContext.fetchResult().shardResult(searchHits, null);
+            when(searchContext.isCancelled()).thenReturn(false);
+            when(searchContext.request()).thenReturn(searchRequest);
+            when(searchContext.shardTarget()).thenReturn(shardTarget);
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            // this is called as part of the search context initialization
+            // with the ResultsType.RANK_FEATURE type
+            searchContext.addRankFeatureResult();
+            rankFeatureShardPhase.processFetch(searchContext);
+
+            assertNotNull(searchContext.rankFeatureResult());
+            assertNotNull(searchContext.rankFeatureResult().rankFeatureResult());
+            assertEquals(searchContext.rankFeatureResult().rankFeatureResult().shardResult().rankFeatureDocs.length, 0);
+        } finally {
+            if (searchHits != null) {
+                searchHits.decRef();
+            }
+        }
+    }
+
+    public void testProcessFetchWhileTaskIsCancelled() {
+
+        final String fieldName = "some_field";
+        int numDocs = randomIntBetween(10, 30);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.rankBuilder(getRankBuilder(fieldName));
+
+        ShardSearchRequest searchRequest = mock(ShardSearchRequest.class);
+        when(searchRequest.source()).thenReturn(searchSourceBuilder);
+
+        SearchShardTarget shardTarget = new SearchShardTarget(
+            "node_id",
+            new ShardId(new Index("some_index", UUID.randomUUID().toString()), 0),
+            null
+        );
+
+        SearchHits searchHits = null;
+        try (SearchContext searchContext = spy(getSearchContext())) {
+            searchContext.addFetchResult();
+            SearchHit[] hits = new SearchHit[0];
+            searchHits = SearchHits.unpooled(hits, new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f);
+            searchContext.fetchResult().shardResult(searchHits, null);
+            when(searchContext.isCancelled()).thenReturn(true);
+            when(searchContext.request()).thenReturn(searchRequest);
+            when(searchContext.shardTarget()).thenReturn(shardTarget);
+            RankFeatureShardRequest request = mock(RankFeatureShardRequest.class);
+            when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 });
+
+            RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase();
+            // this is called as part of the search context initialization
+            // with the ResultsType.RANK_FEATURE type
+            searchContext.addRankFeatureResult();
+            expectThrows(TaskCancelledException.class, () -> rankFeatureShardPhase.processFetch(searchContext));
+        } finally {
+            if (searchHits != null) {
+                searchHits.decRef();
+            }
+        }
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
index 5fcd4eeeb2636..f5fbca13db1db 100644
--- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
@@ -194,9 +194,9 @@ protected final SearchExecutionContext createMockSearchExecutionContext(IndexSea
             IndexFieldData.Builder builder = fieldType.fielddataBuilder(fdc);
             return builder.build(new IndexFieldDataCache.None(), null);
         };
-        NestedLookup nestedLookup = NestedLookup.build(
-            List.of(new NestedObjectMapper.Builder("path", IndexVersion.current()).build(MapperBuilderContext.root(false, false)))
-        );
+        NestedLookup nestedLookup = NestedLookup.build(List.of(new NestedObjectMapper.Builder("path", IndexVersion.current(), query -> {
+            throw new UnsupportedOperationException();
+        }).build(MapperBuilderContext.root(false, false))));
         return new SearchExecutionContext(
             0,
             0,
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
index fa700dc5d78f7..40064e2b68ed1 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
@@ -178,6 +178,7 @@
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.fetch.FetchPhase;
+import org.elasticsearch.search.rank.feature.RankFeatureShardPhase;
 import org.elasticsearch.telemetry.TelemetryProvider;
 import org.elasticsearch.telemetry.tracing.Tracer;
 import org.elasticsearch.test.ClusterServiceUtils;
@@ -1035,7 +1036,7 @@ public void run() {
                     .routingTable()
                     .shardRoutingTable(shardToRelocate.shardId())
                     .primaryShard();
-                if (shardRouting.unassigned() && shardRouting.unassignedInfo().getReason() == UnassignedInfo.Reason.NODE_LEFT) {
+                if (shardRouting.unassigned() && shardRouting.unassignedInfo().reason() == UnassignedInfo.Reason.NODE_LEFT) {
                     if (masterNodeCount > 1) {
                         scheduleNow(() -> testClusterNodes.stopNode(masterNode));
                     }
@@ -1494,6 +1495,25 @@ public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
                         fail("snapshot should not have started");
                     }
 
+                    @Override
+                    public void onFailure(Exception e) {
+                        assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class));
+                        l.onResponse(null);
+                    }
+                })
+            )
+            // attempt to clone snapshot
+            .andThen(
+                (l, ignored) -> client().admin()
+                    .cluster()
+                    .prepareCloneSnapshot(repoName, snapshotName, snapshotName)
+                    .setIndices("*")
+                    .execute(new ActionListener<>() {
+                        @Override
+                        public void onResponse(AcknowledgedResponse acknowledgedResponse) {
+                            fail("snapshot should not have started");
+                        }
+
                     @Override
                     public void onFailure(Exception e) {
                         assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class));
@@ -1502,6 +1522,7 @@ public void onFailure(Exception e) {
                 })
             );
 
+        final var expectedMessage = Strings.format("Invalid snapshot name [%s], snapshot with the same name already exists", snapshotName);
         MockLog.assertThatLogger(() -> {
             deterministicTaskQueue.runAllRunnableTasks();
             assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone());
@@ -1512,7 +1533,13 @@ public void onFailure(Exception e) {
                 "INFO log",
                 SnapshotsService.class.getCanonicalName(),
                 Level.INFO,
-                Strings.format("*failed to create snapshot*Invalid snapshot name [%s]*", snapshotName)
+                Strings.format("*failed to create snapshot*%s", expectedMessage)
+            ),
+            new MockLog.SeenEventExpectation(
+                "INFO log",
+                SnapshotsService.class.getCanonicalName(),
+                Level.INFO,
+                Strings.format("*failed to clone snapshot*%s", expectedMessage)
             )
         );
     }
@@ -2249,6 +2276,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() {
                 threadPool,
                 scriptService,
                 bigArrays,
+                new RankFeatureShardPhase(),
                 new FetchPhase(Collections.emptyList()),
                 responseCollectorService,
                 new NoneCircuitBreakerService(),
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java
index c350e2a4cfaa8..863bb60f0acc7 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java
@@ -22,6 +22,9 @@
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskCancellationService;
+import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.threadpool.ScalingExecutorBuilder;
@@ -31,11 +34,19 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
 
+import static org.elasticsearch.test.tasks.MockTaskManager.SPY_TASK_MANAGER_SETTING;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.verify;
 
 public class RemoteClusterAwareClientTests extends ESTestCase {
 
@@ -62,6 +73,89 @@ private MockTransportService startTransport(String id, List known
         );
     }
 
+    public void testRemoteTaskCancellationOnFailedResponse() throws Exception {
+        Settings.Builder remoteTransportSettingsBuilder = Settings.builder();
+        remoteTransportSettingsBuilder.put(SPY_TASK_MANAGER_SETTING.getKey(), true);
+        try (
+            MockTransportService remoteTransport = RemoteClusterConnectionTests.startTransport(
+                "seed_node",
+                new CopyOnWriteArrayList<>(),
+                VersionInformation.CURRENT,
+                TransportVersion.current(),
+                threadPool,
+                remoteTransportSettingsBuilder.build()
+            )
+        ) {
+            remoteTransport.getTaskManager().setTaskCancellationService(new TaskCancellationService(remoteTransport));
+            Settings.Builder builder = Settings.builder();
+            builder.putList("cluster.remote.cluster1.seeds", remoteTransport.getLocalDiscoNode().getAddress().toString());
+            try (
+                MockTransportService localService = MockTransportService.createNewService(
+                    builder.build(),
+                    VersionInformation.CURRENT,
+                    TransportVersion.current(),
+                    threadPool,
+                    null
+                )
+            ) {
+                // the TaskCancellationService references the same TransportService instance
+                // this is identical to how it works in the Node constructor
+                localService.getTaskManager().setTaskCancellationService(new TaskCancellationService(localService));
+                localService.start();
+                localService.acceptIncomingRequests();
+
+                SearchShardsRequest searchShardsRequest = new SearchShardsRequest(
+                    new String[] { "test-index" },
+                    IndicesOptions.strictExpandOpen(),
+                    new MatchAllQueryBuilder(),
+                    null,
+                    "index_not_found", // this request must fail
+                    randomBoolean(),
+                    null
+                );
+                Task parentTask = localService.getTaskManager().register("test_type", "test_action", searchShardsRequest);
+                TaskId parentTaskId = new TaskId("test-mock-node-id", parentTask.getId());
+                searchShardsRequest.setParentTask(parentTaskId);
+                var client = new RemoteClusterAwareClient(
+                    localService,
+                    "cluster1",
+                    threadPool.executor(TEST_THREAD_POOL_NAME),
+                    randomBoolean()
+                );
+
+                CountDownLatch cancelChildReceived = new CountDownLatch(1);
+                remoteTransport.addRequestHandlingBehavior(
+                    TaskCancellationService.CANCEL_CHILD_ACTION_NAME,
+                    (handler, request, channel, task) -> {
+                        handler.messageReceived(request, channel, task);
+                        cancelChildReceived.countDown();
+                    }
+                );
+                AtomicLong searchShardsRequestId = new AtomicLong(-1);
+                CountDownLatch cancelChildSent = new CountDownLatch(1);
+                localService.addSendBehavior(remoteTransport, (connection, requestId, action, request, options) -> {
+                    connection.sendRequest(requestId, action, request, options);
+                    if (action.equals("indices:admin/search/search_shards")) {
+                        searchShardsRequestId.set(requestId);
+                    } else if (action.equals(TaskCancellationService.CANCEL_CHILD_ACTION_NAME)) {
+                        cancelChildSent.countDown();
+                    }
+                });
+
+                // assert original request failed
+                var future = new PlainActionFuture<SearchShardsResponse>();
+                client.execute(TransportSearchShardsAction.REMOTE_TYPE, searchShardsRequest, future);
+                ExecutionException e = expectThrows(ExecutionException.class, future::get);
+                assertThat(e.getCause(), instanceOf(RemoteTransportException.class));
+
+                // assert remote task is cancelled
+                safeAwait(cancelChildSent);
+                safeAwait(cancelChildReceived);
+                verify(remoteTransport.getTaskManager()).cancelChildLocal(eq(parentTaskId), eq(searchShardsRequestId.get()), anyString());
+            }
+        }
+    }
+
     public void testSearchShards() throws Exception {
         List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
         try (
diff --git a/settings.gradle b/settings.gradle
index 48e3794c9005d..a75c660016599 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -4,6 +4,9 @@ import org.elasticsearch.gradle.internal.toolchain.AdoptiumJdkToolchainResolver
 
 pluginManagement {
   repositories {
+    maven {
+      url 'https://jitpack.io'
+    }
     mavenCentral()
     gradlePluginPortal()
   }
@@ -14,7 +17,7 @@ pluginManagement {
   }
 
   plugins {
-    id "com.gradle.enterprise" version "3.16.2"
+    id "com.gradle.develocity" version "3.17.4"
     id 'elasticsearch.java-toolchain'
   }
 
@@ -102,7 +105,8 @@ List projects = [
   'test:test-clusters',
   'test:x-content',
   'test:yaml-rest-runner',
-  'test:metadata-extractor'
+  'test:metadata-extractor',
+  'test:immutable-collections-patch'
 ]
 
 /**
diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
index 8a3f36ebb1f8a..30623c6bafd6b 100644
--- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
+++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
@@ -73,6 +73,7 @@ public class BootstrapForTesting {
     // without making things complex???
     static {
+        // make sure java.io.tmpdir exists always (in case code uses it in a static initializer)
         Path javaTmpDir = PathUtils.get(
             Objects.requireNonNull(System.getProperty("java.io.tmpdir"), "please set ${java.io.tmpdir} in pom.xml")
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
index 7848f0ef4a625..f3fac694f9980 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
@@ -423,10 +423,10 @@ public void allocateUnassigned(
             RoutingAllocation allocation,
             UnassignedAllocationHandler unassignedAllocationHandler
         ) {
-            if (shardRouting.primary() || shardRouting.unassignedInfo().getReason() == UnassignedInfo.Reason.INDEX_CREATED) {
+            if (shardRouting.primary() || shardRouting.unassignedInfo().reason() == UnassignedInfo.Reason.INDEX_CREATED) {
                 return;
             }
-            if (shardRouting.unassignedInfo().isDelayed()) {
+            if (shardRouting.unassignedInfo().delayed()) {
                 unassignedAllocationHandler.removeAndIgnore(UnassignedInfo.AllocationStatus.DELAYED_ALLOCATION, allocation.changes());
             }
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java b/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java
new file mode 100644
index 0000000000000..7658a37c1df72
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/common/network/ThreadWatchdogHelper.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.common.network;
+
+import java.util.List;
+
+public class ThreadWatchdogHelper {
+    // exposes this package-private method to tests
+    public static List<String> getStuckThreadNames(ThreadWatchdog watchdog) {
+        return watchdog.getStuckThreadNames();
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
index 5025299b09b64..913caba615a67 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java
@@ -8,15 +8,18 @@
 
 package org.elasticsearch.index;
 
+import org.apache.lucene.util.Accountable;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.mapper.MapperMetrics;
 import org.elasticsearch.index.mapper.MapperRegistry;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.script.ScriptCompiler;
@@ -58,6 +61,13 @@ public static MapperService newMapperService(
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexName, finalSettings);
         IndexAnalyzers indexAnalyzers = createTestAnalysis(indexSettings, finalSettings).indexAnalyzers;
         SimilarityService similarityService = new SimilarityService(indexSettings, null, Collections.emptyMap());
+        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() {
+            @Override
+            public void onCache(ShardId shardId, Accountable accountable) {}
+
+            @Override
+            public void onRemoval(ShardId shardId, Accountable accountable) {}
+        });
         return new MapperService(
             () -> TransportVersion.current(),
             indexSettings,
@@ -68,6 +78,7 @@ public static MapperService newMapperService(
             () -> null,
             indexSettings.getMode().idFieldMapperWithoutFieldData(),
             ScriptCompiler.NONE,
+            bitsetFilterCache::getBitSetProducer,
             MapperMetrics.NOOP
         );
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
index c2da7a561c041..dc626a3228685 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java
@@ -55,6 +55,9 @@ public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings ind
             () -> null,
             indexSettings.getMode().idFieldMapperWithoutFieldData(),
             null,
+            query -> {
+                throw new UnsupportedOperationException("The bitset filter cache is not available in translog operations");
+            },
             MapperMetrics.NOOP
         );
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
index 7d13e33be0db7..50436ad64c8af 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java
@@ -46,7 +46,6 @@
 import org.elasticsearch.index.fielddata.FieldDataContext;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
 import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.SearchExecutionContext;
@@ -68,7 +67,6 @@
 import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
 import org.elasticsearch.search.internal.SubSearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;
-import org.elasticsearch.search.lookup.Source;
 import org.elasticsearch.search.lookup.SourceProvider;
 import org.elasticsearch.search.sort.BucketedSort;
 import org.elasticsearch.search.sort.BucketedSort.ExtraData;
@@ -77,11 +75,13 @@
 import org.elasticsearch.telemetry.TelemetryProvider;
 import org.elasticsearch.test.FieldMaskingReader;
 import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContent;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xcontent.json.JsonXContent;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
@@ -148,6 +148,11 @@ protected final DocumentMapper createTimeSeriesModeDocumentMapper(XContentBuilde
         return createMapperService(settings, mappings).documentMapper();
     }
 
+    protected final DocumentMapper createLogsModeDocumentMapper(XContentBuilder mappings) throws IOException {
+        Settings settings = Settings.builder().put(IndexSettings.MODE.getKey(), "logs").build();
+        return createMapperService(settings, mappings).documentMapper();
+    }
+
     protected final DocumentMapper createDocumentMapper(IndexVersion version, XContentBuilder mappings) throws IOException {
         return createMapperService(version, mappings).documentMapper();
     }
@@ -254,6 +259,14 @@ public MapperService build() {
                 getPlugins().stream().filter(p -> p instanceof MapperPlugin).map(p -> (MapperPlugin) p).collect(toList())
             ).getMapperRegistry();
 
+            BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() {
+                @Override
+                public void onCache(ShardId shardId, Accountable accountable) {}
+
+                @Override
+                public void onRemoval(ShardId shardId, Accountable accountable) {}
+            });
+
             return new MapperService(
                 () -> TransportVersion.current(),
                 indexSettings,
@@ -266,6 +279,7 @@ public MapperService build() {
                 },
                 indexSettings.getMode().buildIdFieldMapper(idFieldDataEnabled),
                 scriptCompiler,
+                bitsetFilterCache::getBitSetProducer,
                 mapperMetrics
             );
         }
@@ -794,23 +808,33 @@ private void roundTripSyntheticSource(DocumentMapper mapper, String syntheticSou
     }
 
     private static String syntheticSource(DocumentMapper mapper, IndexReader reader, int docId) throws IOException {
-        SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP);
-        Source synthetic = provider.getSource(getOnlyLeafReader(reader).getContext(), docId);
-        return synthetic.internalSourceRef().utf8ToString();
-    }
+        LeafReader leafReader = getOnlyLeafReader(reader);
+
+        final String synthetic1;
+        final XContent xContent;
+        {
+            SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP);
+            var source = provider.getSource(leafReader.getContext(), docId);
+            synthetic1 = source.internalSourceRef().utf8ToString();
+            xContent = source.sourceContentType().xContent();
+        }
 
-    protected static LeafStoredFieldLoader syntheticSourceStoredFieldLoader(
-        DocumentMapper mapper,
-        LeafReader leafReader,
-        SourceLoader loader
-    ) throws IOException {
-        if (loader.requiredStoredFields().isEmpty()) {
-            return StoredFieldLoader.empty().getLoader(leafReader.getContext(), null);
+        final String synthetic2;
+        {
+            int[] docIds = new int[] { docId };
+            SourceLoader sourceLoader = new SourceLoader.Synthetic(mapper.mapping()::syntheticFieldLoader, SourceFieldMetrics.NOOP);
+            var sourceLeafLoader = sourceLoader.leaf(getOnlyLeafReader(reader), docIds);
+            var storedFieldLoader = StoredFieldLoader.create(false, sourceLoader.requiredStoredFields())
+                .getLoader(leafReader.getContext(), docIds);
+            storedFieldLoader.advanceTo(docId);
+            try (XContentBuilder b = new XContentBuilder(xContent, new ByteArrayOutputStream())) {
+                sourceLeafLoader.write(storedFieldLoader, docId, b);
+                synthetic2 = BytesReference.bytes(b).utf8ToString();
+            }
         }
-        LeafStoredFieldLoader storedFields = StoredFieldLoader.create(false, loader.requiredStoredFields())
-            .getLoader(leafReader.getContext(), null);
-        storedFields.advanceTo(0);
-        return storedFields;
+
+        assertThat(synthetic2, equalTo(synthetic1));
+        return synthetic1;
     }
 
     protected void validateRoundTripReader(String syntheticSource, DirectoryReader reader, DirectoryReader roundTripReader)
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java
index d4c238322e28a..5243ef85cdb76 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java
@@ -63,7 +63,10 @@ private TestDocumentParserContext(MappingLookup mappingLookup, SourceToParse sou
                 null,
                 (type, name) -> Lucene.STANDARD_ANALYZER,
                 MapperTestCase.createIndexSettings(IndexVersion.current(), settings),
-                null
+                null,
+                query -> {
+                    throw new UnsupportedOperationException();
+                }
             ),
             source,
             mappingLookup.getMapping().getRoot(),
diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
index ef29f9fca4f93..520aff77497ba 100644
--- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
+++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
@@ -40,6 +40,7 @@
 import org.elasticsearch.search.MockSearchService;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.search.fetch.FetchPhase;
+import org.elasticsearch.search.rank.feature.RankFeatureShardPhase;
 import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.telemetry.tracing.Tracer;
 import org.elasticsearch.test.ESTestCase;
@@ -97,6 +98,7 @@ SearchService newSearchService(
             ThreadPool threadPool,
             ScriptService scriptService,
             BigArrays bigArrays,
+            RankFeatureShardPhase rankFeatureShardPhase,
             FetchPhase fetchPhase,
             ResponseCollectorService responseCollectorService,
             CircuitBreakerService circuitBreakerService,
@@ -111,6 +113,7 @@ SearchService newSearchService(
                 threadPool,
                 scriptService,
                 bigArrays,
+                rankFeatureShardPhase,
                 fetchPhase,
                 responseCollectorService,
                 circuitBreakerService,
@@ -124,6 +127,7 @@ SearchService newSearchService(
             threadPool,
             scriptService,
             bigArrays,
+            rankFeatureShardPhase,
            fetchPhase,
             responseCollectorService,
             circuitBreakerService,
diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
index aa1889e15d594..747eff1d21708 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.search.rank.feature.RankFeatureShardPhase;
 import org.elasticsearch.telemetry.tracing.Tracer;
 import org.elasticsearch.threadpool.ThreadPool;
 
@@ -81,6 +82,7 @@ public MockSearchService(
         ThreadPool threadPool,
         ScriptService scriptService,
         BigArrays bigArrays,
+        RankFeatureShardPhase rankFeatureShardPhase,
         FetchPhase fetchPhase,
         ResponseCollectorService responseCollectorService,
         CircuitBreakerService circuitBreakerService,
@@ -93,6 +95,7 @@ public MockSearchService(
             threadPool,
             scriptService,
             bigArrays,
+            rankFeatureShardPhase,
             fetchPhase,
             responseCollectorService,
             circuitBreakerService,
diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 134352a4f8af4..d39a8df80c26d 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -1284,7 +1284,10 @@ private static class MockParserContext extends MappingParserContext {
                 ScriptCompiler.NONE,
                 null,
                 indexSettings,
-                null
+                null,
+                query -> {
+                    throw new UnsupportedOperationException();
+                }
             );
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
index 526c2104e52ae..a6c76bc15119c 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java
@@ -77,7 +77,8 @@ protected final Map highlight(MapperService mapperServic
             ir.leaves().get(0),
             0,
             storedFields,
-            source
+            source,
+            null
         );
         processor.process(hitContext);
         highlights.putAll(hitContext.hit().getHighlightFields());
diff --git a/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankBuilder.java b/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankBuilder.java
index 8e2a2c96a31ab..5694b1a2592ef 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankBuilder.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankBuilder.java
@@ -8,6 +8,7 @@
 
 package org.elasticsearch.search.rank;
 
+import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
@@ -15,6 +16,8 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext;
 import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
+import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext;
+import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentParser;
@@ -31,7 +34,7 @@ public class TestRankBuilder extends RankBuilder {
 
     static final ConstructingObjectParser<TestRankBuilder, Void> PARSER = new ConstructingObjectParser<>(
         NAME,
-        args -> new TestRankBuilder(args[0] == null ? DEFAULT_WINDOW_SIZE : (int) args[0])
+        args -> new TestRankBuilder(args[0] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[0])
     );
 
     static {
@@ -74,6 +77,16 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
         // do nothing
     }
 
+    @Override
+    public boolean isCompoundBuilder() {
+        return true;
+    }
+
+    @Override
+    public Explanation explainHit(Explanation baseExplanation, RankDoc rankDoc, List<String> queryNames) {
+        return baseExplanation;
+    }
+
     @Override
     public QueryPhaseRankShardContext buildQueryPhaseShardContext(List<Query> queries, int from) {
         throw new UnsupportedOperationException();
@@ -84,6 +97,16 @@ public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int si
         throw new UnsupportedOperationException();
     }
 
+    @Override
+    public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) {
+        throw new UnsupportedOperationException();
+    }
+
     @Override
     protected boolean doEquals(RankBuilder other) {
         return true;
diff --git a/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankDoc.java b/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankDoc.java
index 0a8b6e4c5f2be..f2f3cb82d203f 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankDoc.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/rank/TestRankDoc.java
@@ -37,4 +37,9 @@ public boolean doEquals(RankDoc rd) {
     public int doHashCode() {
         return 0;
     }
+
+    @Override
+    public String getWriteableName() {
+        return "test_rank_doc";
+    }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
index 6ef8d3d8a6a1b..271df2a971fb1 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
@@ -467,6 +467,13 @@ private static class ServiceHolder implements Closeable {
             IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings);
             scriptService = new MockScriptService(Settings.EMPTY, scriptModule.engines, scriptModule.contexts);
             similarityService = new SimilarityService(idxSettings, null, Collections.emptyMap());
+            this.bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
+                @Override
+                public void onCache(ShardId shardId, Accountable accountable) {}
+
+                @Override
+                public void onRemoval(ShardId shardId, Accountable accountable) {}
+            });
             MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
             mapperService = new MapperService(
                 clusterService,
@@ -478,23 +485,12 @@ private static class ServiceHolder implements Closeable {
                 () -> createShardContext(null),
                 idxSettings.getMode().idFieldMapperWithoutFieldData(),
                 ScriptCompiler.NONE,
+                bitsetFilterCache::getBitSetProducer,
                 MapperMetrics.NOOP
             );
IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings, new IndexFieldDataCache.Listener() { }); indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache, new NoneCircuitBreakerService()); - bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() { - @Override - public void onCache(ShardId shardId, Accountable accountable) { - - } - - @Override - public void onRemoval(ShardId shardId, Accountable accountable) { - - } - }); - if (registerType) { mapperService.merge( "_doc", diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 42cc719a904cd..6920083f2a1a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -79,6 +79,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.PathUtilsForTesting; @@ -145,6 +146,7 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.lang.invoke.MethodHandles; import java.math.BigInteger; import java.net.InetAddress; import java.net.UnknownHostException; @@ -257,8 +259,9 @@ public static void resetPortCounter() { private static final SetOnce WARN_SECURE_RANDOM_FIPS_NOT_DETERMINISTIC = new SetOnce<>(); static { + Random random = initTestSeed(); TEST_WORKER_VM_ID = System.getProperty(TEST_WORKER_SYS_PROPERTY, DEFAULT_TEST_WORKER_ID); - setTestSysProps(); + setTestSysProps(random); // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); @@ -359,8 +362,46 @@ public void append(LogEvent event) { JAVA_ZONE_IDS = ZoneId.getAvailableZoneIds().stream().filter(unsupportedZoneIdsPredicate.negate()).sorted().toList(); } + static Random initTestSeed() { + String inputSeed = System.getProperty("tests.seed"); + long seed; + if (inputSeed == null) { + // when running tests in intellij, we don't have a seed. 
Setup the seed early here, before getting to RandomizedRunner, + // so that we can use it in ESTestCase static init + seed = System.nanoTime(); + setTestSeed(Long.toHexString(seed)); + } else { + String[] seedParts = inputSeed.split("[\\:]"); + seed = Long.parseUnsignedLong(seedParts[0], 16); + } + + if (Booleans.parseBoolean(System.getProperty("tests.hackImmutableCollections", "false"))) { + forceImmutableCollectionsSeed(seed); + } + + return new Random(seed); + } + + @SuppressForbidden(reason = "set tests.seed for intellij") + static void setTestSeed(String seed) { + System.setProperty("tests.seed", seed); + } + + private static void forceImmutableCollectionsSeed(long seed) { + try { + MethodHandles.Lookup lookup = MethodHandles.lookup(); + Class collectionsClass = Class.forName("java.util.ImmutableCollections"); + var salt32l = lookup.findStaticVarHandle(collectionsClass, "SALT32L", long.class); + var reverse = lookup.findStaticVarHandle(collectionsClass, "REVERSE", boolean.class); + salt32l.set(seed & 0xFFFF_FFFFL); + reverse.set((seed & 1) == 0); + } catch (Exception e) { + throw new AssertionError(e); + } + } + @SuppressForbidden(reason = "force log4j and netty sysprops") - private static void setTestSysProps() { + private static void setTestSysProps(Random random) { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); @@ -377,11 +418,7 @@ private static void setTestSysProps() { System.setProperty("es.set.netty.runtime.available.processors", "false"); // sometimes use the java.time date formatters - // we can't use randomBoolean here, the random context isn't set properly - // so read it directly from the test seed in an unfortunately hacky way - String testSeed = System.getProperty("tests.seed", "0"); - boolean firstBit = (Integer.parseInt(testSeed.substring(testSeed.length() - 1), 16) & 1) == 1; - if (firstBit) { + if (random.nextBoolean()) { System.setProperty("es.datetime.java_time_parsers", "true"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index cba2b41d279bb..fa414cd8121d6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -463,6 +464,16 @@ public float getMaxScore() { return queryResult.getMaxScore(); } + @Override + public void addRankFeatureResult() { + // this space intentionally left blank + } + + @Override + public RankFeatureResult rankFeatureResult() { + return null; + } + @Override public FetchSearchResult fetchResult() { return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index a7f21bd206c62..bf9eba87ee809 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -687,6 +687,10 @@ public static Matcher hasScore(final float score) { return transformedMatch(SearchHit::getScore, equalTo(score)); } + public static Matcher hasRank(final int rank) { + return transformedMatch(SearchHit::getRank, equalTo(rank)); + } + public static T assertBooleanSubQuery(Query query, Class subqueryType, int i) { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index 599868ab7f1f9..68f1f74b23c0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -39,6 +39,12 @@ public class MockTaskManager extends TaskManager { Property.NodeScope ); + public static final Setting SPY_TASK_MANAGER_SETTING = Setting.boolSetting( + "tests.spy.taskmanager.enabled", + false, + Property.NodeScope + ); + private final Collection listeners = new CopyOnWriteArrayList<>(); public MockTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 93c9f63fc5e63..51893e551ba88 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -80,6 +80,7 @@ import java.util.function.Supplier; import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.spy; /** * A mock delegate service that allows to simulate different network topology failures. 
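A hypothetical wiring for the new spy task manager setting (the setting names come from this patch; threadPool, taskHeaders, and tracer are assumed to be in scope, and the Mockito verification is illustrative, assuming TaskManager#register(String, String, TaskAwareRequest)):

    Settings settings = Settings.builder()
        .put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true)
        .put(MockTaskManager.SPY_TASK_MANAGER_SETTING.getKey(), true)
        .build();
    TaskManager taskManager = MockTransportService.createTaskManager(settings, threadPool, taskHeaders, tracer);
    // With the spy setting enabled the returned manager is a Mockito spy, so tests can
    // assert interactions, e.g.:
    // verify(taskManager).register(eq("transport"), eq("internal:test"), any());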
@@ -102,7 +103,7 @@ public class MockTransportService extends TransportService { public static class TestPlugin extends Plugin { @Override public List> getSettings() { - return List.of(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING); + return List.of(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING, MockTaskManager.SPY_TASK_MANAGER_SETTING); } } @@ -310,7 +311,15 @@ private static TransportAddress[] extractTransportAddresses(TransportService tra return transportAddresses.toArray(new TransportAddress[transportAddresses.size()]); } - private static TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { + public static TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { + if (MockTaskManager.SPY_TASK_MANAGER_SETTING.get(settings)) { + return spy(createMockTaskManager(settings, threadPool, taskHeaders, tracer)); + } else { + return createMockTaskManager(settings, threadPool, taskHeaders, tracer); + } + } + + private static TaskManager createMockTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { return new MockTaskManager(settings, threadPool, taskHeaders); } else { diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 6ced86156d008..b6a8bc343687f 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -18,6 +18,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.UnsafePlainActionFuture; @@ -33,6 +34,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.network.ThreadWatchdog; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -41,6 +43,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.IOUtils; @@ -226,6 +229,7 @@ private MockTransportService buildService( ) { Settings updatedSettings = Settings.builder() .put(TransportSettings.PORT.getKey(), getPortRange()) + .put(ThreadWatchdog.NETWORK_THREAD_WATCHDOG_INTERVAL.getKey(), TimeValue.ZERO) // suppress watchdog running concurrently .put(settings) .put(Node.NODE_NAME_SETTING.getKey(), name) .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) // suppress assertions to test production error-handling @@ -3348,6 +3352,61 @@ public void writeTo(StreamOutput out) throws IOException { } } + public void 
testWatchdogLogging() { + final var watchdog = networkService.getThreadWatchdog(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + watchdog.run(Settings.EMPTY, deterministicTaskQueue.getThreadPool(), new Lifecycle()); + + final var barrier = new CyclicBarrier(2); + final var threadNameFuture = new PlainActionFuture(); + final var actionName = "internal:action"; + serviceA.registerRequestHandler( + actionName, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + TransportRequest.Empty::new, + (request, channel, task) -> { + threadNameFuture.onResponse(Thread.currentThread().getName()); + safeAwait(barrier); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + ); + + final var responseLatch = new CountDownLatch(1); + submitRequest( + serviceB, + nodeA, + actionName, + new TransportRequest.Empty(), + new ActionListenerResponseHandler( + ActionTestUtils.assertNoFailureListener(t -> responseLatch.countDown()), + in -> TransportResponse.Empty.INSTANCE, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ) + ); + + final var threadName = safeGet(threadNameFuture); + assertFalse(deterministicTaskQueue.hasRunnableTasks()); + deterministicTaskQueue.advanceTime(); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.UnseenEventExpectation("no logging", ThreadWatchdog.class.getCanonicalName(), Level.WARN, "*") + ); + deterministicTaskQueue.advanceTime(); + MockLog.assertThatLogger( + deterministicTaskQueue::runAllRunnableTasks, + ThreadWatchdog.class, + new MockLog.SeenEventExpectation( + "stuck threads logging", + ThreadWatchdog.class.getCanonicalName(), + Level.WARN, + "the following threads are active but did not make progress in the preceding [5s]: [" + threadName + "]" + ) + ); + safeAwait(barrier); + safeAwait(responseLatch); + } + private static long[] getConstantMessageSizeHistogram(int count, long size) { final var histogram = new long[29]; int bucket = 0; diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle new file mode 100644 index 0000000000000..2d42215b3e02c --- /dev/null +++ b/test/immutable-collections-patch/build.gradle @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import org.elasticsearch.gradle.OS +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.internal.info.BuildParams + +apply plugin: 'elasticsearch.java' + +configurations { + patch +} + +dependencies { + implementation 'org.ow2.asm:asm:9.7' + implementation 'org.ow2.asm:asm-tree:9.7' +} + +def outputDir = layout.buildDirectory.dir("jdk-patches") +def generatePatch = tasks.register("generatePatch", JavaExec) +generatePatch.configure { + dependsOn tasks.named("compileJava") + inputs.property("java-home-set", BuildParams.getIsRuntimeJavaHomeSet()) + inputs.property("java-version", BuildParams.runtimeJavaVersion) + outputs.dir(outputDir) + + classpath = sourceSets.main.runtimeClasspath + mainClass = 'org.elasticsearch.jdk.patch.ImmutableCollectionsPatcher' + if (BuildParams.getIsRuntimeJavaHomeSet()) { + executable = "${BuildParams.runtimeJavaHome}/bin/java" + (OS.current() == OS.WINDOWS ? 
'.exe' : '') + } else { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(BuildParams.runtimeJavaVersion.majorVersion) + vendor = VersionProperties.bundledJdkVendor == "openjdk" ? + JvmVendorSpec.ORACLE : + JvmVendorSpec.matching(VersionProperties.bundledJdkVendor) + } + } + doFirst { + args outputDir.get().getAsFile().toString() + } +} + +artifacts.add("patch", generatePatch); diff --git a/test/immutable-collections-patch/src/main/java/org/elasticsearch/jdk/patch/ImmutableCollectionsPatcher.java b/test/immutable-collections-patch/src/main/java/org/elasticsearch/jdk/patch/ImmutableCollectionsPatcher.java new file mode 100644 index 0000000000000..b98df1b3d2e17 --- /dev/null +++ b/test/immutable-collections-patch/src/main/java/org/elasticsearch/jdk/patch/ImmutableCollectionsPatcher.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.jdk.patch; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.FieldVisitor; +import org.objectweb.asm.Opcodes; + +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * Loads ImmutableCollections.class from the current jdk and writes it out + * as a public class with SALT32L and REVERSE as public, non-final static fields. + * + * By exposing ImmutableCollections, tests run with this patched version can + * hook in the existing test seed to ensure consistent iteration of immutable collections. + * Note that the consistency is for reproducing dependencies on iteration + * order, so that the code can be fixed. 
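+ *
+ * An assumed end-to-end flow (flags and paths illustrative): the gradle task in
+ * test/immutable-collections-patch writes the patched class under
+ * build/jdk-patches/java.base, test JVMs then start with
+ * --patch-module java.base=build/jdk-patches/java.base and
+ * -Dtests.hackImmutableCollections=true, and ESTestCase writes the test seed into
+ * SALT32L/REVERSE through VarHandles, which the unpatched final fields would not
+ * permit.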
+ */ +public class ImmutableCollectionsPatcher { + private static final String CLASSFILE = "java.base/java/util/ImmutableCollections.class"; + + public static void main(String[] args) throws Exception { + Path outputDir = Paths.get(args[0]); + byte[] originalClassFile = Files.readAllBytes(Paths.get(URI.create("jrt:/" + CLASSFILE))); + + ClassReader classReader = new ClassReader(originalClassFile); + ClassWriter classWriter = new ClassWriter(classReader, 0); + classReader.accept(new ClassVisitor(Opcodes.ASM9, classWriter) { + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + super.visit(version, Opcodes.ACC_PUBLIC, name, signature, superName, interfaces); + } + + @Override + public FieldVisitor visitField(int access, String name, String descriptor, String signature, Object value) { + if (name.equals("SALT32L") || name.equals("REVERSE")) { + access = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC; + } + return super.visitField(access, name, descriptor, signature, value); + } + }, 0); + Path outputFile = outputDir.resolve(CLASSFILE); + Files.createDirectories(outputFile.getParent()); + Files.write(outputFile, classWriter.toByteArray()); + } +} diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index c7736a54187f1..cdcc0e495582a 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -410,7 +410,7 @@ public void parse(DocumentParserContext context) throws IOException { + name() + "] of type [" + typeName() - + "] doesn't not support indexing multiple values for the same field in the same document" + + "] doesn't support indexing multiple values for the same field in the same document" ); } context.doc().addWithKey(fieldType().name(), field); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java index 2892ada15fec9..5e2bdaf2d465e 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java @@ -87,7 +87,7 @@ public void testParseArrayValue() throws Exception { }))); assertThat( e.getCause().getMessage(), - containsString("doesn't not support indexing multiple values for the same field in the same document") + containsString("doesn't support indexing multiple values for the same field in the same document") ); } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index 04b0b11ad38d4..c0305f873327d 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -397,6 +397,14 @@ protected void onQueryResult(int shardIndex, QuerySearchResult queryResult) { } } + @Override + protected void onRankFeatureResult(int shardIndex) { + 
checkCancellation(); + if (delegate != null) { + delegate.onRankFeatureResult(shardIndex); + } + } + @Override protected void onFetchResult(int shardIndex) { checkCancellation(); @@ -420,6 +428,12 @@ protected void onQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exc ); } + @Override + protected void onRankFeatureFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { + // best effort to cancel expired tasks + checkCancellation(); + } + @Override protected void onFetchFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { // best effort to cancel expired tasks diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index c3cd60d88c6f2..ac22d22d5affb 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -335,7 +335,7 @@ public SharedBlobCacheService( String ioExecutor, BlobCacheMetrics blobCacheMetrics ) { - this(environment, settings, threadPool, ioExecutor, blobCacheMetrics, threadPool::relativeTimeInNanos); + this(environment, settings, threadPool, ioExecutor, blobCacheMetrics, System::nanoTime); } public SharedBlobCacheService( diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index dff3ff935595f..90bbc29a11b41 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -553,7 +553,7 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() if (RestoreInProgress.get(event.state()).isEmpty() == false && event.state().routingTable().hasIndex(followerIndex)) { final IndexRoutingTable indexRoutingTable = event.state().routingTable().index(followerIndex); for (ShardRouting shardRouting : indexRoutingTable.shardsWithState(ShardRoutingState.UNASSIGNED)) { - if (shardRouting.unassignedInfo().getLastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA) { + if (shardRouting.unassignedInfo().lastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA) { try { assertBusy(() -> { final Long snapshotShardSize = snapshotsInfoService.snapshotShardSizes().getShardSize(shardRouting); @@ -644,7 +644,7 @@ public void testCcrRepositoryFailsToFetchSnapshotShardSizes() throws Exception { assertBusy(() -> { List sizes = indexRoutingTable.shardsWithState(ShardRoutingState.UNASSIGNED) .stream() - .filter(shard -> shard.unassignedInfo().getLastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA) + .filter(shard -> shard.unassignedInfo().lastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA) .sorted(Comparator.comparingInt(ShardRouting::getId)) .map(shard -> snapshotsInfoService.snapshotShardSizes().getShardSize(shard)) .filter(Objects::nonNull) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java index cdb5bf67b4712..e1d1d84a4eca5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.UnsafePlainActionFuture; import org.elasticsearch.client.internal.RemoteClusterClient; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.core.TimeValue; @@ -18,6 +19,7 @@ import org.elasticsearch.index.seqno.RetentionLeaseAlreadyExistsException; import org.elasticsearch.index.seqno.RetentionLeaseNotFoundException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.threadpool.ThreadPool; import java.util.Locale; import java.util.Optional; @@ -79,7 +81,7 @@ public static Optional syncAddRetentionLea final TimeValue timeout ) { try { - final PlainActionFuture response = new PlainActionFuture<>(); + final PlainActionFuture response = new UnsafePlainActionFuture<>(ThreadPool.Names.GENERIC); asyncAddRetentionLease(leaderShardId, retentionLeaseId, retainingSequenceNumber, remoteClient, response); response.actionGet(timeout); return Optional.empty(); diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 116f864695e15..0c65c7e4b6d29 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -176,6 +176,12 @@ testClusters.configureEach { requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.15.0") } +if (BuildParams.isSnapshotBuild() == false) { + tasks.withType(Test).configureEach { + systemProperty 'es.failure_store_feature_flag_enabled', 'true' + } +} + if (BuildParams.inFipsJvm) { // Test clusters run with security disabled tasks.named("javaRestTest").configure { enabled = false } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java index e716a18738bca..28b04bc9614bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java @@ -62,14 +62,15 @@ public Result isConditionMet(Index index, ClusterState clusterState) { assert indexAbstraction != null : "invalid cluster metadata. index [" + indexName + "] was not found"; DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - assert dataStream.getWriteIndex() != null : dataStream.getName() + " has no write index"; - if (dataStream.getWriteIndex().equals(index)) { + boolean isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + if (isFailureStoreWriteIndex || dataStream.getWriteIndex().equals(index)) { String errorMessage = String.format( Locale.ROOT, - "index [%s] is the write index for data stream [%s], pausing " + "index [%s] is the%s write index for data stream [%s], pausing " + "ILM execution of lifecycle [%s] until this index is no longer the write index for the data stream via manual or " + "automated rollover", indexName, + isFailureStoreWriteIndex ? 
" failure store" : "", dataStream.getName(), policyName ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java index ba6b6f9366c61..282f32da28a6b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; import java.util.Locale; @@ -40,13 +41,17 @@ public void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState cu DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - assert dataStream.getWriteIndex() != null : dataStream.getName() + " has no write index"; + Index failureStoreWriteIndex = dataStream.getFailureStoreWriteIndex(); + boolean isFailureStoreWriteIndex = failureStoreWriteIndex != null && indexName.equals(failureStoreWriteIndex.getName()); // using index name equality across this if/else branch as the UUID of the index might change via restoring a data stream // with one index from snapshot - if (dataStream.getIndices().size() == 1 && dataStream.getWriteIndex().getName().equals(indexName)) { - // This is the last index in the data stream, the entire stream - // needs to be deleted, because we can't have an empty data stream + if (dataStream.getIndices().size() == 1 + && isFailureStoreWriteIndex == false + && dataStream.getWriteIndex().getName().equals(indexName)) { + // This is the last backing index in the data stream, and it's being deleted because the policy doesn't have a rollover + // phase. The entire stream needs to be deleted, because we can't have an empty list of data stream backing indices. + // We do this even if there are multiple failure store indices because otherwise we would never delete the index. DeleteDataStreamAction.Request deleteReq = new DeleteDataStreamAction.Request(new String[] { dataStream.getName() }); getClient().execute( DeleteDataStreamAction.INSTANCE, @@ -54,13 +59,14 @@ public void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState cu listener.delegateFailureAndWrap((l, response) -> l.onResponse(null)) ); return; - } else if (dataStream.getWriteIndex().getName().equals(indexName)) { + } else if (isFailureStoreWriteIndex || dataStream.getWriteIndex().getName().equals(indexName)) { String errorMessage = String.format( Locale.ROOT, - "index [%s] is the write index for data stream [%s]. " + "index [%s] is the%s write index for data stream [%s]. " + "stopping execution of lifecycle [%s] as a data stream's write index cannot be deleted. manually rolling over the" + " index will resume the execution of the policy as the index will not be the data stream's write index anymore", indexName, + isFailureStoreWriteIndex ? 
" failure store" : "", dataStream.getName(), policyName ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java index 9de08c8693a12..3962768e94212 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java @@ -86,14 +86,15 @@ public ClusterState performAction(Index index, ClusterState clusterState) { throw new IllegalStateException(errorMessage); } - assert dataStream.getWriteIndex() != null : dataStream.getName() + " has no write index"; - if (dataStream.getWriteIndex().equals(index)) { + boolean isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + if (isFailureStoreWriteIndex || dataStream.getWriteIndex().equals(index)) { String errorMessage = String.format( Locale.ROOT, - "index [%s] is the write index for data stream [%s], pausing " + "index [%s] is the%s write index for data stream [%s], pausing " + "ILM execution of lifecycle [%s] until this index is no longer the write index for the data stream via manual or " + "automated rollover", originalIndex, + isFailureStoreWriteIndex ? " failure store" : "", dataStream.getName(), policyName ); @@ -114,8 +115,10 @@ public ClusterState performAction(Index index, ClusterState clusterState) { throw new IllegalStateException(errorMessage); } - Metadata.Builder newMetaData = Metadata.builder(clusterState.getMetadata()) - .put(dataStream.replaceBackingIndex(index, targetIndexMetadata.getIndex())); + DataStream updatedDataStream = dataStream.isFailureStoreIndex(originalIndex) + ? 
dataStream.replaceFailureStoreIndex(index, targetIndexMetadata.getIndex()) + : dataStream.replaceBackingIndex(index, targetIndexMetadata.getIndex()); + Metadata.Builder newMetaData = Metadata.builder(clusterState.getMetadata()).put(updatedDataStream); return ClusterState.builder(clusterState).metadata(newMetaData).build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java index 26300f646d617..3e6c00eeadba4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -57,13 +58,16 @@ public void performAction( IndexAbstraction indexAbstraction = currentClusterState.metadata().getIndicesLookup().get(indexName); assert indexAbstraction != null : "expected the index " + indexName + " to exist in the lookup but it didn't"; final String rolloverTarget; + final boolean targetFailureStore; DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - assert dataStream.getWriteIndex() != null : "datastream " + dataStream.getName() + " has no write index"; - if (dataStream.getWriteIndex().equals(indexMetadata.getIndex()) == false) { + boolean isFailureStoreWriteIndex = indexMetadata.getIndex().equals(dataStream.getFailureStoreWriteIndex()); + targetFailureStore = dataStream.isFailureStoreIndex(indexMetadata.getIndex().getName()); + if (isFailureStoreWriteIndex == false && dataStream.getWriteIndex().equals(indexMetadata.getIndex()) == false) { logger.warn( - "index [{}] is not the write index for data stream [{}]. skipping rollover for policy [{}]", + "index [{}] is not the {}write index for data stream [{}]. skipping rollover for policy [{}]", indexName, + targetFailureStore ? 
"failure store " : "", dataStream.getName(), indexMetadata.getLifecyclePolicyName() ); @@ -115,10 +119,18 @@ public void performAction( } rolloverTarget = rolloverAlias; + targetFailureStore = false; } // Calling rollover with no conditions will always roll over the index RolloverRequest rolloverRequest = new RolloverRequest(rolloverTarget, null).masterNodeTimeout(TimeValue.MAX_VALUE); + if (targetFailureStore) { + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) + .build() + ); + } // We don't wait for active shards when we perform the rollover because the // {@link org.elasticsearch.xpack.core.ilm.WaitForActiveShardsStep} step will do so rolloverRequest.setWaitForActiveShards(ActiveShardCount.NONE); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java index b6cf8b0bdd663..71c99d7f21848 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java @@ -30,7 +30,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.parseIndexNameCounter; /** - * After we performed the index rollover we wait for the the configured number of shards for the rolled over index (ie. newly created + * After we performed the index rollover we wait for the configured number of shards for the rolled over index (ie. newly created * index) to become available. */ public class WaitForActiveShardsStep extends ClusterStateWaitStep { @@ -84,10 +84,17 @@ public Result isConditionMet(Index index, ClusterState clusterState) { if (dataStream != null) { IndexAbstraction dataStreamAbstraction = metadata.getIndicesLookup().get(dataStream.getName()); assert dataStreamAbstraction != null : dataStream.getName() + " datastream is not present in the metadata indices lookup"; - if (dataStreamAbstraction.getWriteIndex() == null) { + // Determine which write index we care about right now: + final Index rolledIndex; + if (dataStream.isFailureStoreIndex(index.getName())) { + rolledIndex = dataStream.getFailureStoreWriteIndex(); + } else { + rolledIndex = dataStream.getWriteIndex(); + } + if (rolledIndex == null) { return getErrorResultOnNullMetadata(getKey(), index); } - IndexMetadata rolledIndexMeta = metadata.index(dataStreamAbstraction.getWriteIndex()); + IndexMetadata rolledIndexMeta = metadata.index(rolledIndex); rolledIndexName = rolledIndexMeta.getIndex().getName(); waitForActiveShardsSettingValue = rolledIndexMeta.getSettings().get(IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey()); } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java index acb36bd015e4b..7b751994222b1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.rollover.RolloverConditions; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -83,13 +84,16 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(index.getName()); assert indexAbstraction != null : "invalid cluster metadata. index [" + index.getName() + "] was not found"; final String rolloverTarget; + final boolean targetFailureStore; DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - assert dataStream.getWriteIndex() != null : "datastream " + dataStream.getName() + " has no write index"; - if (dataStream.getWriteIndex().equals(index) == false) { + targetFailureStore = dataStream.isFailureStoreIndex(index.getName()); + boolean isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + if (isFailureStoreWriteIndex == false && dataStream.getWriteIndex().equals(index) == false) { logger.warn( - "index [{}] is not the write index for data stream [{}]. skipping rollover for policy [{}]", + "index [{}] is not the {}write index for data stream [{}]. skipping rollover for policy [{}]", index.getName(), + targetFailureStore ? "failure store " : "", dataStream.getName(), metadata.index(index).getLifecyclePolicyName() ); @@ -194,12 +198,18 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, } rolloverTarget = rolloverAlias; + targetFailureStore = false; } // if we should only rollover if not empty, *and* if neither an explicit min_docs nor an explicit min_primary_shard_docs // has been specified on this policy, then inject a default min_docs: 1 condition so that we do not rollover empty indices boolean rolloverOnlyIfHasDocuments = LifecycleSettings.LIFECYCLE_ROLLOVER_ONLY_IF_HAS_DOCUMENTS_SETTING.get(metadata.settings()); - RolloverRequest rolloverRequest = createRolloverRequest(rolloverTarget, masterTimeout, rolloverOnlyIfHasDocuments); + RolloverRequest rolloverRequest = createRolloverRequest( + rolloverTarget, + masterTimeout, + rolloverOnlyIfHasDocuments, + targetFailureStore + ); getClient().admin().indices().rolloverIndex(rolloverRequest, ActionListener.wrap(response -> { final var conditionStatus = response.getConditionStatus(); @@ -226,10 +236,22 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, * @return A RolloverRequest suitable for passing to {@code rolloverIndex(...) }. 
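 * When the target is a data stream's failure store, the returned request is further
 * narrowed to failure indices via its indices options. An illustrative dry-run call:
 * {@code createRolloverRequest("my-data-stream", masterTimeout, true, true)}.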
*/ // visible for testing - RolloverRequest createRolloverRequest(String rolloverTarget, TimeValue masterTimeout, boolean rolloverOnlyIfHasDocuments) { + RolloverRequest createRolloverRequest( + String rolloverTarget, + TimeValue masterTimeout, + boolean rolloverOnlyIfHasDocuments, + boolean targetFailureStore + ) { RolloverRequest rolloverRequest = new RolloverRequest(rolloverTarget, null).masterNodeTimeout(masterTimeout); rolloverRequest.dryRun(true); rolloverRequest.setConditions(applyDefaultConditions(conditions, rolloverOnlyIfHasDocuments)); + if (targetFailureStore) { + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(opts -> opts.includeFailureIndices(true).includeRegularIndices(false)) + .build() + ); + } return rolloverRequest; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index be4c87195e9d1..19542ef466156 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -20,7 +20,7 @@ import java.util.Objects; import java.util.Set; -public class DeleteInferenceEndpointAction extends ActionType { +public class DeleteInferenceEndpointAction extends ActionType { public static final DeleteInferenceEndpointAction INSTANCE = new DeleteInferenceEndpointAction(); public static final String NAME = "cluster:admin/xpack/inference/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java deleted file mode 100644 index c91d0dc6fd538..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedSparseEmbeddingResults.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.core.inference.results; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; - -public class ChunkedSparseEmbeddingResults implements ChunkedInferenceServiceResults { - - public static final String NAME = "chunked_sparse_embedding_results"; - public static final String FIELD_NAME = "sparse_embedding_chunk"; - - public static ChunkedSparseEmbeddingResults ofMlResult(ChunkedTextExpansionResults mlInferenceResults) { - return new ChunkedSparseEmbeddingResults(mlInferenceResults.getChunks()); - } - - /** - * Returns a list of {@link ChunkedSparseEmbeddingResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedSparseEmbeddingResults} will have a single chunk containing the entire results from the - * {@link SparseEmbeddingResults}. - */ - public static List of(List inputs, SparseEmbeddingResults sparseEmbeddingResults) { - validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size()); - - var results = new ArrayList(inputs.size()); - for (int i = 0; i < inputs.size(); i++) { - results.add(of(inputs.get(i), sparseEmbeddingResults.embeddings().get(i))); - } - - return results; - } - - public static ChunkedSparseEmbeddingResults of(String input, SparseEmbeddingResults.Embedding embedding) { - var weightedTokens = embedding.tokens() - .stream() - .map(weightedToken -> new WeightedToken(weightedToken.token(), weightedToken.weight())) - .toList(); - - return new ChunkedSparseEmbeddingResults(List.of(new ChunkedTextExpansionResults.ChunkedResult(input, weightedTokens))); - } - - private final List chunkedResults; - - public ChunkedSparseEmbeddingResults(List chunks) { - this.chunkedResults = chunks; - } - - public ChunkedSparseEmbeddingResults(StreamInput in) throws IOException { - this.chunkedResults = in.readCollectionAsList(ChunkedTextExpansionResults.ChunkedResult::new); - } - - public List getChunkedResults() { - return chunkedResults; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(FIELD_NAME); - for (ChunkedTextExpansionResults.ChunkedResult chunk : chunkedResults) { - chunk.toXContent(builder, params); - } - builder.endArray(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(chunkedResults); - } - - @Override - public List transformToCoordinationFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the coordindated action"); - } - - @Override - public List transformToLegacyFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); - } - - @Override - public Map asMap() { - return Map.of( - FIELD_NAME, - 
chunkedResults.stream().map(ChunkedTextExpansionResults.ChunkedResult::asMap).collect(Collectors.toList()) - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ChunkedSparseEmbeddingResults that = (ChunkedSparseEmbeddingResults) o; - return Objects.equals(chunkedResults, that.chunkedResults); - } - - @Override - public int hashCode() { - return Objects.hash(chunkedResults); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java deleted file mode 100644 index f09eafc1591dd..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingResults.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.inference.results; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.IntStream; - -import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; - -public class ChunkedTextEmbeddingResults implements ChunkedInferenceServiceResults { - - public static final String NAME = "chunked_text_embedding_service_results"; - - public static final String FIELD_NAME = "text_embedding_chunk"; - - public static ChunkedTextEmbeddingResults ofMlResult( - org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults mlInferenceResults - ) { - return new ChunkedTextEmbeddingResults(mlInferenceResults.getChunks()); - } - - /** - * Returns a list of {@link ChunkedTextEmbeddingResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedTextEmbeddingResults} will have a single chunk containing the entire results from the - * {@link TextEmbeddingResults}. - */ - public static List of(List inputs, TextEmbeddingResults textEmbeddings) { - validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); - - var results = new ArrayList(inputs.size()); - for (int i = 0; i < inputs.size(); i++) { - results.add(ChunkedTextEmbeddingResults.of(inputs.get(i), textEmbeddings.embeddings().get(i).values())); - } - - return results; - } - - public static ChunkedTextEmbeddingResults of(String input, float[] floatEmbeddings) { - double[] doubleEmbeddings = IntStream.range(0, floatEmbeddings.length).mapToDouble(i -> floatEmbeddings[i]).toArray(); - - return new ChunkedTextEmbeddingResults( - List.of( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk(input, doubleEmbeddings) - ) - ); - } - - private static double floatToDouble(Float aFloat) { - return aFloat != null ? 
aFloat : 0; - } - - private final List chunks; - - public ChunkedTextEmbeddingResults( - List chunks - ) { - this.chunks = chunks; - } - - public ChunkedTextEmbeddingResults(StreamInput in) throws IOException { - this.chunks = in.readCollectionAsList( - org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk::new - ); - } - - public List getChunks() { - return chunks; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - // TODO add isTruncated flag - builder.startArray(FIELD_NAME); - for (var embedding : chunks) { - embedding.toXContent(builder, params); - } - builder.endArray(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(chunks); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public List transformToCoordinationFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the coordinated action"); - } - - @Override - public List transformToLegacyFormat() { - throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); - } - - @Override - public Map asMap() { - return Map.of(FIELD_NAME, chunks); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingResults that = (ChunkedTextEmbeddingResults) o; - return Objects.equals(chunks, that.chunks); - } - - @Override - public int hashCode() { - return Objects.hash(chunks); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java index eef864f2e8992..376b8763a5eb9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java @@ -8,17 +8,21 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Stream; public class ErrorChunkedInferenceResults implements ChunkedInferenceServiceResults { @@ -94,4 +98,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public String getWriteableName() { return NAME; } + + @Override + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return Stream.of(exception).map(e -> new Chunk(e.getMessage(), BytesArray.EMPTY)).iterator(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java new file mode 
100644 index 0000000000000..2093b687a2ab9 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedSparseEmbeddingResults.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.search.WeightedToken; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; + +public class InferenceChunkedSparseEmbeddingResults implements ChunkedInferenceServiceResults { + + public static final String NAME = "chunked_sparse_embedding_results"; + public static final String FIELD_NAME = "sparse_embedding_chunk"; + + public static InferenceChunkedSparseEmbeddingResults ofMlResult(InferenceChunkedTextExpansionResults mlInferenceResults) { + return new InferenceChunkedSparseEmbeddingResults(mlInferenceResults.getChunks()); + } + + /** + * Returns a list of {@link InferenceChunkedSparseEmbeddingResults}. The number of entries in the list will match the input list size. + * Each {@link InferenceChunkedSparseEmbeddingResults} will have a single chunk containing the entire results from the + * {@link SparseEmbeddingResults}. 
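+ * For example (illustrative): {@code listOf(List.of("first", "second"), results)}
+ * yields two single-chunk result objects when {@code results} holds exactly two
+ * embeddings; any size mismatch fails validation.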
+ */ + public static List listOf(List inputs, SparseEmbeddingResults sparseEmbeddingResults) { + validateInputSizeAgainstEmbeddings(inputs, sparseEmbeddingResults.embeddings().size()); + + var results = new ArrayList(inputs.size()); + for (int i = 0; i < inputs.size(); i++) { + results.add(ofSingle(inputs.get(i), sparseEmbeddingResults.embeddings().get(i))); + } + + return results; + } + + private static InferenceChunkedSparseEmbeddingResults ofSingle(String input, SparseEmbeddingResults.Embedding embedding) { + var weightedTokens = embedding.tokens() + .stream() + .map(weightedToken -> new WeightedToken(weightedToken.token(), weightedToken.weight())) + .toList(); + + return new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult(input, weightedTokens)) + ); + } + + private final List chunkedResults; + + public InferenceChunkedSparseEmbeddingResults(List chunks) { + this.chunkedResults = chunks; + } + + public InferenceChunkedSparseEmbeddingResults(StreamInput in) throws IOException { + this.chunkedResults = in.readCollectionAsList(InferenceChunkedTextExpansionResults.ChunkedResult::new); + } + + public List getChunkedResults() { + return chunkedResults; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(FIELD_NAME); + for (InferenceChunkedTextExpansionResults.ChunkedResult chunk : chunkedResults) { + chunk.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(chunkedResults); + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the coordinated action"); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); + } + + @Override + public Map asMap() { + return Map.of( + FIELD_NAME, + chunkedResults.stream().map(InferenceChunkedTextExpansionResults.ChunkedResult::asMap).collect(Collectors.toList()) + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceChunkedSparseEmbeddingResults that = (InferenceChunkedSparseEmbeddingResults) o; + return Objects.equals(chunkedResults, that.chunkedResults); + } + + @Override + public int hashCode() { + return Objects.hash(chunkedResults); + } + + @Override + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunkedResults.stream() + .map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.weightedTokens()))) + .iterator(); + } + + /** + * Serialises the {@link WeightedToken} list, according to the provided {@link XContent}, + * into a {@link BytesReference}.
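+ * For example (illustrative), a two-token list serialises with JSON XContent into
+ * the object {@code {"some_token":0.1,"other_token":0.2}}.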
+ */ + private static BytesReference toBytesReference(XContent xContent, List tokens) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startObject(); + for (var weightedToken : tokens) { + weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS); + } + b.endObject(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java similarity index 65% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java index 86ea70ddd62dd..a2bc072064ea1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingByteResults.java @@ -8,51 +8,56 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; -public record ChunkedTextEmbeddingByteResults(List chunks, boolean isTruncated) implements ChunkedInferenceServiceResults { +public record InferenceChunkedTextEmbeddingByteResults(List chunks, boolean isTruncated) + implements + ChunkedInferenceServiceResults { public static final String NAME = "chunked_text_embedding_service_byte_results"; public static final String FIELD_NAME = "text_embedding_byte_chunk"; /** - * Returns a list of {@link ChunkedTextEmbeddingByteResults}. The number of entries in the list will match the input list size. - * Each {@link ChunkedTextEmbeddingByteResults} will have a single chunk containing the entire results from the - * {@link TextEmbeddingByteResults}. + * Returns a list of {@link InferenceChunkedTextEmbeddingByteResults}. The number of entries in the list will match the input list size. + * Each {@link InferenceChunkedTextEmbeddingByteResults} will have a single chunk containing the entire results from the + * {@link InferenceTextEmbeddingByteResults}. 
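+ * For example (with {@code byteResults} standing in for a real {@link InferenceTextEmbeddingByteResults}): {@code var chunked = InferenceChunkedTextEmbeddingByteResults.listOf(inputs, byteResults)} produces one result per input string, each carrying the whole byte embedding as its only chunk.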
*/ - public static List of(List inputs, TextEmbeddingByteResults textEmbeddings) { + public static List listOf(List inputs, InferenceTextEmbeddingByteResults textEmbeddings) { validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); var results = new ArrayList(inputs.size()); for (int i = 0; i < inputs.size(); i++) { - results.add(of(inputs.get(i), textEmbeddings.embeddings().get(i).values())); + results.add(ofSingle(inputs.get(i), textEmbeddings.embeddings().get(i).values())); } return results; } - public static ChunkedTextEmbeddingByteResults of(String input, byte[] byteEmbeddings) { - return new ChunkedTextEmbeddingByteResults(List.of(new EmbeddingChunk(input, byteEmbeddings)), false); + private static InferenceChunkedTextEmbeddingByteResults ofSingle(String input, byte[] byteEmbeddings) { + return new InferenceChunkedTextEmbeddingByteResults(List.of(new InferenceByteEmbeddingChunk(input, byteEmbeddings)), false); } - public ChunkedTextEmbeddingByteResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(EmbeddingChunk::new), in.readBoolean()); + public InferenceChunkedTextEmbeddingByteResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceByteEmbeddingChunk::new), in.readBoolean()); } @Override @@ -92,7 +97,7 @@ public String getWriteableName() { return NAME; } - public List getChunks() { + public List getChunks() { return chunks; } @@ -100,7 +105,7 @@ public List getChunks() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingByteResults that = (ChunkedTextEmbeddingByteResults) o; + InferenceChunkedTextEmbeddingByteResults that = (InferenceChunkedTextEmbeddingByteResults) o; return isTruncated == that.isTruncated && Objects.equals(chunks, that.chunks); } @@ -109,9 +114,9 @@ public int hashCode() { return Objects.hash(chunks, isTruncated); } - public record EmbeddingChunk(String matchedText, byte[] embedding) implements Writeable, ToXContentObject { + public record InferenceByteEmbeddingChunk(String matchedText, byte[] embedding) implements Writeable, ToXContentObject { - public EmbeddingChunk(StreamInput in) throws IOException { + public InferenceByteEmbeddingChunk(StreamInput in) throws IOException { this(in.readString(), in.readByteArray()); } @@ -145,7 +150,7 @@ public String toString() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - EmbeddingChunk that = (EmbeddingChunk) o; + InferenceByteEmbeddingChunk that = (InferenceByteEmbeddingChunk) o; return Objects.equals(matchedText, that.matchedText) && Arrays.equals(embedding, that.embedding); } @@ -156,4 +161,22 @@ public int hashCode() { return result; } } + + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunks.stream().map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.embedding()))).iterator(); + } + + private static BytesReference toBytesReference(XContent xContent, byte[] value) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startArray(); + for (byte v : value) { + b.value(v); + } + b.endArray(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java similarity index 52% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java index 4fcd5a53fc287..9b625f9b1712a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResults.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -15,22 +16,61 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -public record ChunkedTextEmbeddingFloatResults(List chunks) implements ChunkedInferenceServiceResults { +import static org.elasticsearch.xpack.core.inference.results.TextEmbeddingUtils.validateInputSizeAgainstEmbeddings; + +public record InferenceChunkedTextEmbeddingFloatResults(List chunks) + implements + ChunkedInferenceServiceResults { public static final String NAME = "chunked_text_embedding_service_float_results"; public static final String FIELD_NAME = "text_embedding_float_chunk"; - public ChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(EmbeddingChunk::new)); + public InferenceChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceFloatEmbeddingChunk::new)); + } + + /** + * Returns a list of {@link InferenceChunkedTextEmbeddingFloatResults}. + * Each {@link InferenceChunkedTextEmbeddingFloatResults} contains a single chunk with the text and the + * {@link InferenceTextEmbeddingFloatResults}.
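+ * For instance ({@code floatResults} being a placeholder for an {@link InferenceTextEmbeddingFloatResults}): {@code var chunked = InferenceChunkedTextEmbeddingFloatResults.listOf(List.of("some text"), floatResults)} returns a single result whose only {@link InferenceFloatEmbeddingChunk} pairs "some text" with its embedding.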
+ */ + public static List listOf(List inputs, InferenceTextEmbeddingFloatResults textEmbeddings) { + validateInputSizeAgainstEmbeddings(inputs, textEmbeddings.embeddings().size()); + + var results = new ArrayList(inputs.size()); + + for (int i = 0; i < inputs.size(); i++) { + results.add( + new InferenceChunkedTextEmbeddingFloatResults( + List.of(new InferenceFloatEmbeddingChunk(inputs.get(i), textEmbeddings.embeddings().get(i).values())) + ) + ); + } + + return results; + } + + public static InferenceChunkedTextEmbeddingFloatResults ofMlResults(MlChunkedTextEmbeddingFloatResults mlInferenceResult) { + return new InferenceChunkedTextEmbeddingFloatResults( + mlInferenceResult.getChunks() + .stream() + .map(chunk -> new InferenceFloatEmbeddingChunk(chunk.matchedText(), FloatConversionUtils.floatArrayOf(chunk.embedding()))) + .toList() + ); } @Override @@ -69,7 +109,7 @@ public String getWriteableName() { return NAME; } - public List getChunks() { + public List getChunks() { return chunks; } @@ -77,7 +117,7 @@ public List getChunks() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ChunkedTextEmbeddingFloatResults that = (ChunkedTextEmbeddingFloatResults) o; + InferenceChunkedTextEmbeddingFloatResults that = (InferenceChunkedTextEmbeddingFloatResults) o; return Objects.equals(chunks, that.chunks); } @@ -86,12 +126,16 @@ public int hashCode() { return Objects.hash(chunks); } - public record EmbeddingChunk(String matchedText, float[] embedding) implements Writeable, ToXContentObject { + public record InferenceFloatEmbeddingChunk(String matchedText, float[] embedding) implements Writeable, ToXContentObject { - public EmbeddingChunk(StreamInput in) throws IOException { + public InferenceFloatEmbeddingChunk(StreamInput in) throws IOException { this(in.readString(), in.readFloatArray()); } + public static InferenceFloatEmbeddingChunk of(String matchedText, double[] doubleEmbedding) { + return new InferenceFloatEmbeddingChunk(matchedText, FloatConversionUtils.floatArrayOf(doubleEmbedding)); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(matchedText); @@ -122,7 +166,7 @@ public String toString() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - EmbeddingChunk that = (EmbeddingChunk) o; + InferenceFloatEmbeddingChunk that = (InferenceFloatEmbeddingChunk) o; return Objects.equals(matchedText, that.matchedText) && Arrays.equals(embedding, that.embedding); } @@ -134,4 +178,24 @@ public int hashCode() { } } + public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + return chunks.stream().map(chunk -> new Chunk(chunk.matchedText(), toBytesReference(xcontent, chunk.embedding()))).iterator(); + } + + /** + * Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}. 
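+ * For example, a two-element float array {@code {0.1f, 0.2f}} is rendered as the array {@code [0.1, 0.2]} in the requested format.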
+ */ + private static BytesReference toBytesReference(XContent xContent, float[] value) { + try { + XContentBuilder b = XContentBuilder.builder(xContent); + b.startArray(); + for (float v : value) { + b.value(v); + } + b.endArray(); + return BytesReference.bytes(b); + } catch (IOException exc) { + throw new RuntimeException(exc); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java similarity index 82% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java index 04986b2d957d7..8d94083bf3241 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingByteResults.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -43,12 +44,12 @@ * ] * } */ -public record TextEmbeddingByteResults(List embeddings) implements InferenceServiceResults, TextEmbedding { +public record InferenceTextEmbeddingByteResults(List embeddings) implements InferenceServiceResults, TextEmbedding { public static final String NAME = "text_embedding_service_byte_results"; public static final String TEXT_EMBEDDING_BYTES = "text_embedding_bytes"; - public TextEmbeddingByteResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(Embedding::new)); + public InferenceTextEmbeddingByteResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceByteEmbedding::new)); } @Override @@ -59,7 +60,7 @@ public int getFirstEmbeddingSize() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(TEXT_EMBEDDING_BYTES); - for (Embedding embedding : embeddings) { + for (InferenceByteEmbedding embedding : embeddings) { embedding.toXContent(builder, params); } builder.endArray(); @@ -79,13 +80,7 @@ public String getWriteableName() { @Override public List transformToCoordinationFormat() { return embeddings.stream() - .map( - embedding -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TEXT_EMBEDDING_BYTES, - embedding.toDoubleArray(), - false - ) - ) + .map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING_BYTES, embedding.toDoubleArray(), false)) .toList(); } @@ -110,7 +105,7 @@ public Map asMap() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TextEmbeddingByteResults that = (TextEmbeddingByteResults) o; + InferenceTextEmbeddingByteResults that = (InferenceTextEmbeddingByteResults) o; return Objects.equals(embeddings, that.embeddings); } @@ -119,10 +114,10 @@ public int hashCode() { return Objects.hash(embeddings); } - public record Embedding(byte[] values) implements Writeable, ToXContentObject, EmbeddingInt { + public record InferenceByteEmbedding(byte[] values) implements Writeable, 
ToXContentObject, EmbeddingInt { public static final String EMBEDDING = "embedding"; - public Embedding(StreamInput in) throws IOException { + public InferenceByteEmbedding(StreamInput in) throws IOException { this(in.readByteArray()); } @@ -131,12 +126,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeByteArray(values); } - public static Embedding of(List embeddingValuesList) { + public static InferenceByteEmbedding of(List embeddingValuesList) { byte[] embeddingValues = new byte[embeddingValuesList.size()]; for (int i = 0; i < embeddingValuesList.size(); i++) { embeddingValues[i] = embeddingValuesList.get(i); } - return new Embedding(embeddingValues); + return new InferenceByteEmbedding(embeddingValues); } @Override @@ -183,7 +178,7 @@ public int getSize() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Embedding embedding = (Embedding) o; + InferenceByteEmbedding embedding = (InferenceByteEmbedding) o; return Arrays.equals(values, embedding.values); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java similarity index 76% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java index 152e10e82d5ba..1822e3af28c2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/InferenceTextEmbeddingFloatResults.java @@ -20,6 +20,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -47,29 +48,32 @@ * ] * } */ -public record TextEmbeddingResults(List embeddings) implements InferenceServiceResults, TextEmbedding { +public record InferenceTextEmbeddingFloatResults(List embeddings) + implements + InferenceServiceResults, + TextEmbedding { public static final String NAME = "text_embedding_service_results"; public static final String TEXT_EMBEDDING = TaskType.TEXT_EMBEDDING.toString(); - public TextEmbeddingResults(StreamInput in) throws IOException { - this(in.readCollectionAsList(Embedding::new)); + public InferenceTextEmbeddingFloatResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(InferenceFloatEmbedding::new)); } @SuppressWarnings("deprecation") - TextEmbeddingResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) { + InferenceTextEmbeddingFloatResults(LegacyTextEmbeddingResults legacyTextEmbeddingResults) { this( legacyTextEmbeddingResults.embeddings() .stream() - .map(embedding -> new Embedding(embedding.values())) + .map(embedding -> new InferenceFloatEmbedding(embedding.values())) .collect(Collectors.toList()) ); } - public static TextEmbeddingResults of(List results) { - List embeddings = new ArrayList<>(results.size()); + public static InferenceTextEmbeddingFloatResults of(List results) { + List embeddings = new ArrayList<>(results.size()); for (InferenceResults result : results) { - if (result instanceof 
org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { - embeddings.add(Embedding.of(embeddingResult)); + if (result instanceof MlTextEmbeddingResults embeddingResult) { + embeddings.add(InferenceFloatEmbedding.of(embeddingResult)); } else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) { if (errorResult.getException() instanceof ElasticsearchStatusException statusException) { throw statusException; @@ -86,7 +90,7 @@ public static TextEmbeddingResults of(List results) ); } } - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } @Override @@ -97,7 +101,7 @@ public int getFirstEmbeddingSize() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(TEXT_EMBEDDING); - for (Embedding embedding : embeddings) { + for (InferenceFloatEmbedding embedding : embeddings) { embedding.toXContent(builder, params); } builder.endArray(); @@ -116,15 +120,7 @@ public String getWriteableName() { @Override public List transformToCoordinationFormat() { - return embeddings.stream() - .map( - embedding -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TEXT_EMBEDDING, - embedding.asDoubleArray(), - false - ) - ) - .toList(); + return embeddings.stream().map(embedding -> new MlTextEmbeddingResults(TEXT_EMBEDDING, embedding.asDoubleArray(), false)).toList(); } @Override @@ -148,7 +144,7 @@ public Map asMap() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TextEmbeddingResults that = (TextEmbeddingResults) o; + InferenceTextEmbeddingFloatResults that = (InferenceTextEmbeddingFloatResults) o; return Objects.equals(embeddings, that.embeddings); } @@ -157,24 +153,24 @@ public int hashCode() { return Objects.hash(embeddings); } - public record Embedding(float[] values) implements Writeable, ToXContentObject, EmbeddingInt { + public record InferenceFloatEmbedding(float[] values) implements Writeable, ToXContentObject, EmbeddingInt { public static final String EMBEDDING = "embedding"; - public Embedding(StreamInput in) throws IOException { + public InferenceFloatEmbedding(StreamInput in) throws IOException { this(in.readFloatArray()); } - public static Embedding of(org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { + public static InferenceFloatEmbedding of(MlTextEmbeddingResults embeddingResult) { float[] embeddingAsArray = embeddingResult.getInferenceAsFloat(); - return new Embedding(embeddingAsArray); + return new InferenceFloatEmbedding(embeddingAsArray); } - public static Embedding of(List embeddingValuesList) { + public static InferenceFloatEmbedding of(List embeddingValuesList) { float[] embeddingValues = new float[embeddingValuesList.size()]; for (int i = 0; i < embeddingValuesList.size(); i++) { embeddingValues[i] = embeddingValuesList.get(i); } - return new Embedding(embeddingValues); + return new InferenceFloatEmbedding(embeddingValues); } @Override @@ -218,7 +214,7 @@ private double[] asDoubleArray() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Embedding embedding = (Embedding) o; + InferenceFloatEmbedding embedding = (InferenceFloatEmbedding) o; return Arrays.equals(values, embedding.values); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java index 87a56da93ec17..84a0928cae0d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java @@ -44,7 +44,7 @@ * * Legacy text embedding results represents what was returned prior to the * {@link org.elasticsearch.TransportVersions#V_8_12_0} version. - * @deprecated use {@link TextEmbeddingResults} instead + * @deprecated use {@link InferenceTextEmbeddingFloatResults} instead */ @Deprecated public record LegacyTextEmbeddingResults(List embeddings) implements InferenceResults { @@ -114,8 +114,8 @@ public int hashCode() { return Objects.hash(embeddings); } - public TextEmbeddingResults transformToTextEmbeddingResults() { - return new TextEmbeddingResults(this); + public InferenceTextEmbeddingFloatResults transformToTextEmbeddingResults() { + return new InferenceTextEmbeddingFloatResults(this); } public record Embedding(float[] values) implements Writeable, ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java index c865d23ef8e28..4fe2c9ae486f1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ResultUtils.java @@ -12,11 +12,11 @@ public class ResultUtils { - public static ElasticsearchStatusException createInvalidChunkedResultException(String receivedResultName) { + public static ElasticsearchStatusException createInvalidChunkedResultException(String expectedResultName, String receivedResultName) { return new ElasticsearchStatusException( "Expected a chunked inference [{}] received [{}]", RestStatus.INTERNAL_SERVER_ERROR, - ChunkedTextEmbeddingResults.NAME, + expectedResultName, receivedResultName ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java index eb5f1d4f086d0..e6b580f62fdd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferModelAction.java @@ -90,6 +90,7 @@ public static Builder parseRequest(String id, XContentParser parser) { private final List textInput; private boolean highPriority; private TrainedModelPrefixStrings.PrefixType prefixType = TrainedModelPrefixStrings.PrefixType.NONE; + private boolean chunked = false; /** * Build a request from a list of documents as maps. 
@@ -197,6 +198,11 @@ public Request(StreamInput in) throws IOException { } else { prefixType = TrainedModelPrefixStrings.PrefixType.NONE; } + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + chunked = in.readBoolean(); + } else { + chunked = false; + } } public int numberOfDocuments() { @@ -247,6 +253,14 @@ public TrainedModelPrefixStrings.PrefixType getPrefixType() { return prefixType; } + public void setChunked(boolean chunked) { + this.chunked = chunked; + } + + public boolean isChunked() { + return chunked; + } + @Override public ActionRequestValidationException validate() { return null; @@ -271,6 +285,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeEnum(prefixType); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + out.writeBoolean(chunked); + } } @Override @@ -285,7 +302,8 @@ public boolean equals(Object o) { && Objects.equals(objectsToInfer, that.objectsToInfer) && Objects.equals(textInput, that.textInput) && (highPriority == that.highPriority) - && (prefixType == that.prefixType); + && (prefixType == that.prefixType) + && (chunked == that.chunked); } @Override @@ -295,7 +313,17 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public int hashCode() { - return Objects.hash(id, objectsToInfer, update, previouslyLicensed, inferenceTimeout, textInput, highPriority, prefixType); + return Objects.hash( + id, + objectsToInfer, + update, + previouslyLicensed, + inferenceTimeout, + textInput, + highPriority, + prefixType, + chunked + ); } public static class Builder { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index a3fb956c3252d..354e898a514d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -20,17 +20,17 @@ import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.NerResults; import org.elasticsearch.xpack.core.ml.inference.results.NlpClassificationInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.PyTorchPassThroughResults; import org.elasticsearch.xpack.core.ml.inference.results.QuestionAnsweringInferenceResults; import 
org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; @@ -652,7 +652,9 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(InferenceResults.class, PyTorchPassThroughResults.NAME, PyTorchPassThroughResults::new) ); namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceResults.class, TextExpansionResults.NAME, TextExpansionResults::new)); - namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceResults.class, TextEmbeddingResults.NAME, TextEmbeddingResults::new)); + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceResults.class, MlTextEmbeddingResults.NAME, MlTextEmbeddingResults::new) + ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceResults.class, @@ -675,10 +677,18 @@ public List getNamedWriteables() { ) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceResults.class, ChunkedTextEmbeddingResults.NAME, ChunkedTextEmbeddingResults::new) + new NamedWriteableRegistry.Entry( + InferenceResults.class, + MlChunkedTextEmbeddingFloatResults.NAME, + MlChunkedTextEmbeddingFloatResults::new + ) ); namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceResults.class, ChunkedTextExpansionResults.NAME, ChunkedTextExpansionResults::new) + new NamedWriteableRegistry.Entry( + InferenceResults.class, + InferenceChunkedTextExpansionResults.NAME, + InferenceChunkedTextExpansionResults::new + ) ); // Inference Configs diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java index f2055e0930fda..3c719262fbfc6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResults.java @@ -21,7 +21,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class ChunkedTextExpansionResults extends ChunkedNlpInferenceResults { +public class InferenceChunkedTextExpansionResults extends ChunkedNlpInferenceResults { public static final String NAME = "chunked_text_expansion_result"; public record ChunkedResult(String matchedText, List weightedTokens) implements Writeable, ToXContentObject { @@ -60,13 +60,13 @@ public Map asMap() { private final String resultsField; private final List chunks; - public ChunkedTextExpansionResults(String resultField, List chunks, boolean isTruncated) { + public InferenceChunkedTextExpansionResults(String resultField, List chunks, boolean isTruncated) { super(isTruncated); this.resultsField = resultField; this.chunks = chunks; } - public ChunkedTextExpansionResults(StreamInput in) throws IOException { + public InferenceChunkedTextExpansionResults(StreamInput in) throws 
IOException { super(in); this.resultsField = in.readString(); this.chunks = in.readCollectionAsList(ChunkedResult::new); @@ -104,7 +104,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - ChunkedTextExpansionResults that = (ChunkedTextExpansionResults) o; + InferenceChunkedTextExpansionResults that = (InferenceChunkedTextExpansionResults) o; return Objects.equals(resultsField, that.resultsField) && Objects.equals(chunks, that.chunks); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java similarity index 91% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java index e47554aebbadf..aabd87c1c2725 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResults.java @@ -21,7 +21,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class ChunkedTextEmbeddingResults extends ChunkedNlpInferenceResults { +public class MlChunkedTextEmbeddingFloatResults extends ChunkedNlpInferenceResults { public record EmbeddingChunk(String matchedText, double[] embedding) implements Writeable, ToXContentObject { @@ -77,13 +77,13 @@ public int hashCode() { private final String resultsField; private final List chunks; - public ChunkedTextEmbeddingResults(String resultsField, List embeddings, boolean isTruncated) { + public MlChunkedTextEmbeddingFloatResults(String resultsField, List embeddings, boolean isTruncated) { super(isTruncated); this.resultsField = resultsField; this.chunks = embeddings; } - public ChunkedTextEmbeddingResults(StreamInput in) throws IOException { + public MlChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { super(in); this.resultsField = in.readString(); this.chunks = in.readCollectionAsList(EmbeddingChunk::new); @@ -134,7 +134,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - ChunkedTextEmbeddingResults that = (ChunkedTextEmbeddingResults) o; + MlChunkedTextEmbeddingFloatResults that = (MlChunkedTextEmbeddingFloatResults) o; return Objects.equals(resultsField, that.resultsField) && Objects.equals(chunks, that.chunks); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java similarity index 89% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java index 526c2ec7b7aaa..0c0fa6f3f690e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResults.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResults.java @@ -16,20 +16,20 @@ import java.util.Map; import java.util.Objects; -public class TextEmbeddingResults extends NlpInferenceResults { +public class MlTextEmbeddingResults extends NlpInferenceResults { public static final String NAME = "text_embedding_result"; private final String resultsField; private final double[] inference; - public TextEmbeddingResults(String resultsField, double[] inference, boolean isTruncated) { + public MlTextEmbeddingResults(String resultsField, double[] inference, boolean isTruncated) { super(isTruncated); this.inference = inference; this.resultsField = resultsField; } - public TextEmbeddingResults(StreamInput in) throws IOException { + public MlTextEmbeddingResults(StreamInput in) throws IOException { super(in); inference = in.readDoubleArray(); resultsField = in.readString(); @@ -89,7 +89,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - TextEmbeddingResults that = (TextEmbeddingResults) o; + MlTextEmbeddingResults that = (MlTextEmbeddingResults) o; return Objects.equals(resultsField, that.resultsField) && Arrays.equals(inference, that.inference); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index 7623a7f65af34..4cea7526bce83 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.security.action.settings; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -14,6 +15,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,27 +26,39 @@ import static org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction.PROFILES_INDEX_NAME; import static org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction.TOKENS_INDEX_NAME; -public class GetSecuritySettingsAction extends ActionType { +public class GetSecuritySettingsAction { - public static final GetSecuritySettingsAction INSTANCE = new GetSecuritySettingsAction(); - public static final String NAME = "cluster:admin/xpack/security/settings/get"; + public static final ActionType INSTANCE = new ActionType<>( + "cluster:admin/xpack/security/settings/get" + ); - public GetSecuritySettingsAction() { - super(NAME); - } + private GetSecuritySettingsAction() {/* no instances */} public static class Request extends MasterNodeReadRequest { - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterNodeTimeout) { + super(masterNodeTimeout); + } + + @UpdateForV9 // no need for bwc any more, this can be 
inlined + public static Request readFrom(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + return new Request(in); + } else { + return new Request(TimeValue.THIRTY_SECONDS); + } } - public Request(StreamInput in) throws IOException { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + private Request(StreamInput in) throws IOException { + super(in); } @Override - public void writeTo(StreamOutput out) throws IOException {} + public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + super.writeTo(out); + } + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 3cce133749e44..2d59911ec7ecb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.security.action.settings; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ValidateActions; @@ -16,6 +17,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -28,9 +31,9 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class UpdateSecuritySettingsAction extends ActionType { - public static final UpdateSecuritySettingsAction INSTANCE = new UpdateSecuritySettingsAction(); - public static final String NAME = "cluster:admin/xpack/security/settings/update"; +public class UpdateSecuritySettingsAction { + + public static final ActionType INSTANCE = new ActionType<>("cluster:admin/xpack/security/settings/update"); // The names here are separate constants for 2 reasons: // 1. 
Keeping the names defined here helps ensure REST compatibility, even if the internal aliases of these indices change, @@ -44,9 +47,7 @@ public class UpdateSecuritySettingsAction extends ActionType { @@ -54,11 +55,19 @@ public static class Request extends AcknowledgedRequest { private final Map tokensIndexSettings; private final Map profilesIndexSettings; + public interface Factory { + Request create( + Map mainIndexSettings, + Map tokensIndexSettings, + Map profilesIndexSettings + ); + } + @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "update_security_settings_request", false, - a -> new Request((Map) a[0], (Map) a[1], (Map) a[2]) + (a, factory) -> factory.create((Map) a[0], (Map) a[1], (Map) a[2]) ); static { @@ -68,18 +77,36 @@ public static class Request extends AcknowledgedRequest { } public Request( + TimeValue masterNodeTimeout, + TimeValue ackTimeout, Map mainIndexSettings, Map tokensIndexSettings, Map profilesIndexSettings ) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + super(masterNodeTimeout, ackTimeout); this.mainIndexSettings = Objects.requireNonNullElse(mainIndexSettings, Collections.emptyMap()); this.tokensIndexSettings = Objects.requireNonNullElse(tokensIndexSettings, Collections.emptyMap()); this.profilesIndexSettings = Objects.requireNonNullElse(profilesIndexSettings, Collections.emptyMap()); } - public Request(StreamInput in) throws IOException { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + @UpdateForV9 // no need for bwc any more, this can be inlined + public static Request readFrom(StreamInput in) throws IOException { + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + return new Request(in); + } else { + return new Request(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); + } + } + + private Request(StreamInput in) throws IOException { + super(in); + this.mainIndexSettings = in.readGenericMap(); + this.tokensIndexSettings = in.readGenericMap(); + this.profilesIndexSettings = in.readGenericMap(); + } + + private Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, StreamInput in) throws IOException { + super(masterNodeTimeout, ackTimeout); this.mainIndexSettings = in.readGenericMap(); this.tokensIndexSettings = in.readGenericMap(); this.profilesIndexSettings = in.readGenericMap(); @@ -87,13 +114,16 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_SETTINGS_REQUEST_TIMEOUTS)) { + super.writeTo(out); + } out.writeGenericMap(this.mainIndexSettings); out.writeGenericMap(this.tokensIndexSettings); out.writeGenericMap(this.profilesIndexSettings); } - public static Request parse(XContentParser parser) { - return PARSER.apply(parser, null); + public static Request parse(XContentParser parser, Factory factory) { + return PARSER.apply(parser, factory); } public Map mainIndexSettings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java index 703cc18c66f49..39173be73f191 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Subject.java @@ -272,7 +272,7 @@ private RoleReferenceIntersection buildRoleReferencesForApiKey() { } // Package private for testing - RoleReference.ApiKeyRoleReference buildRoleReferenceForCrossClusterApiKey() { + RoleReference.CrossClusterApiKeyRoleReference buildRoleReferenceForCrossClusterApiKey() { assert version.onOrAfter(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY); final String apiKeyId = (String) metadata.get(AuthenticationField.API_KEY_ID_KEY); assert ApiKey.Type.CROSS_CLUSTER == getApiKeyType() : "cross cluster access must use cross-cluster API keys"; @@ -283,7 +283,7 @@ RoleReference.ApiKeyRoleReference buildRoleReferenceForCrossClusterApiKey() { final BytesReference limitedByRoleDescriptorsBytes = (BytesReference) metadata.get(API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY); assert isEmptyRoleDescriptorsBytes(limitedByRoleDescriptorsBytes) : "cross cluster API keys must have empty limited-by role descriptors"; - return new RoleReference.ApiKeyRoleReference(apiKeyId, roleDescriptorsBytes, RoleReference.ApiKeyRoleType.ASSIGNED, true); + return new RoleReference.CrossClusterApiKeyRoleReference(apiKeyId, roleDescriptorsBytes); } private RoleReferenceIntersection buildRoleReferencesForCrossClusterAccess() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 7f927d45a2375..4465d7d083183 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; +import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.action.XPackInfoAction; @@ -183,6 +184,8 @@ public class ClusterPrivilegeResolver { private static final Set CROSS_CLUSTER_SEARCH_PATTERN = Set.of( RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME, RemoteClusterNodesAction.TYPE.name(), + TaskCancellationService.REMOTE_CLUSTER_BAN_PARENT_ACTION_NAME, + TaskCancellationService.REMOTE_CLUSTER_CANCEL_CHILD_ACTION_NAME, XPackInfoAction.NAME, // esql enrich "cluster:monitor/xpack/enrich/esql/resolve_policy", @@ -192,6 +195,8 @@ public class ClusterPrivilegeResolver { private static final Set CROSS_CLUSTER_REPLICATION_PATTERN = Set.of( RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME, RemoteClusterNodesAction.TYPE.name(), + TaskCancellationService.REMOTE_CLUSTER_BAN_PARENT_ACTION_NAME, + TaskCancellationService.REMOTE_CLUSTER_CANCEL_CHILD_ACTION_NAME, XPackInfoAction.NAME, ClusterStateAction.NAME ); @@ -264,7 +269,7 @@ public class ClusterPrivilegeResolver { ActionTypes.QUERY_USER_ACTION.name(), GetUserPrivilegesAction.NAME, // normally authorized under the "same-user" authz check, but added here for uniformity HasPrivilegesAction.NAME, - GetSecuritySettingsAction.NAME + GetSecuritySettingsAction.INSTANCE.name() ) ); public static final NamedClusterPrivilege MANAGE_SAML = new ActionClusterPrivilege("manage_saml", MANAGE_SAML_PATTERN); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java index 3e631c2516967..3f7a53f7a2dd1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReference.java @@ -81,36 +81,29 @@ final class ApiKeyRoleReference implements RoleReference { private final BytesReference roleDescriptorsBytes; private final ApiKeyRoleType roleType; private RoleKey id = null; - private final boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; public ApiKeyRoleReference(String apiKeyId, BytesReference roleDescriptorsBytes, ApiKeyRoleType roleType) { - this(apiKeyId, roleDescriptorsBytes, roleType, false); - } - - public ApiKeyRoleReference( - String apiKeyId, - BytesReference roleDescriptorsBytes, - ApiKeyRoleType roleType, - boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess - ) { this.apiKeyId = apiKeyId; this.roleDescriptorsBytes = roleDescriptorsBytes; this.roleType = roleType; - this.checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess = checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; } @Override public RoleKey id() { // Hashing can be expensive. memorize the result in case the method is called multiple times. if (id == null) { - final String roleDescriptorsHash = MessageDigests.toHexString( - MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()) - ); - id = new RoleKey(Set.of("apikey:" + roleDescriptorsHash), "apikey_" + roleType); + id = computeRoleKey(roleDescriptorsBytes, roleType); } return id; } + private static RoleKey computeRoleKey(BytesReference roleDescriptorsBytes, ApiKeyRoleType roleType) { + final String roleDescriptorsHash = MessageDigests.toHexString( + MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()) + ); + return new RoleKey(Set.of("apikey:" + roleDescriptorsHash), "apikey_" + roleType); + } + @Override public void resolve(RoleReferenceResolver resolver, ActionListener listener) { resolver.resolveApiKeyRoleReference(this, listener); @@ -127,12 +120,60 @@ public BytesReference getRoleDescriptorsBytes() { public ApiKeyRoleType getRoleType() { return roleType; } + } + + /** + * Represents the role descriptors of the cross-cluster API key underlying an API key authentication based remote cluster connection. + * This captures the permissions of the cross-cluster API key on the fulfilling cluster and is intersected with the permissions of the + * query-cluster-side user entity making the cross cluster request (see {@link CrossClusterAccessRoleReference}). + */ + final class CrossClusterApiKeyRoleReference implements RoleReference { + + private final String apiKeyId; + private final BytesReference roleDescriptorsBytes; + private final ApiKeyRoleType roleType; + private RoleKey id = null; + + public CrossClusterApiKeyRoleReference(String apiKeyId, BytesReference roleDescriptorsBytes) { + this.apiKeyId = apiKeyId; + this.roleDescriptorsBytes = roleDescriptorsBytes; + this.roleType = ApiKeyRoleType.ASSIGNED; + } + + @Override + public RoleKey id() { + // Hashing can be expensive. memoize the result in case the method is called multiple times.
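+ // Unsynchronized caching is a benign race: computeRoleKey is deterministic, so concurrent callers can only ever store an identical key (mirroring ApiKeyRoleReference above).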
+ if (id == null) { + // Note: the role key is the same as for ApiKeyRoleReference, to maximize cache utilization + id = ApiKeyRoleReference.computeRoleKey(roleDescriptorsBytes, roleType); + } + return id; + } - public boolean checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess() { - return checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess; + @Override + public void resolve(RoleReferenceResolver resolver, ActionListener listener) { + resolver.resolveCrossClusterApiKeyRoleReference(this, listener); + } + + public String getApiKeyId() { + return apiKeyId; + } + + public BytesReference getRoleDescriptorsBytes() { + return roleDescriptorsBytes; + } + + public ApiKeyRoleType getRoleType() { + return roleType; } } + /** + * Represents the role descriptors sent from the querying cluster to the fulfilling cluster as part of API key authentication based + * cross cluster operations. This captures the permissions of the user entity on the querying cluster and is intersected with the + * fulfilling-cluster-side permissions of the cross-cluster API key underlying the connection + * (see {@link CrossClusterApiKeyRoleReference}). + */ final class CrossClusterAccessRoleReference implements RoleReference { private final CrossClusterAccessSubjectInfo.RoleDescriptorsBytes roleDescriptorsBytes; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java index 21e4a3f73a9bc..bac9a210fa7a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceResolver.java @@ -30,4 +30,9 @@ void resolveCrossClusterAccessRoleReference( RoleReference.CrossClusterAccessRoleReference crossClusterAccessRoleReference, ActionListener listener ); + + void resolveCrossClusterApiKeyRoleReference( + RoleReference.CrossClusterApiKeyRoleReference crossClusterApiKeyRoleReference, + ActionListener listener + ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java new file mode 100644 index 0000000000000..6cadefbe206f0 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats.REGISTERED_TRANSFORM_COUNT_FIELD_NAME; + +public class GetTransformNodeStatsAction extends ActionType { + + public static final GetTransformNodeStatsAction INSTANCE = new GetTransformNodeStatsAction(); + public static final String NAME = "cluster:admin/transform/node_stats"; + + private static final String SCHEDULER_STATS_FIELD_NAME = "scheduler"; + + private GetTransformNodeStatsAction() { + super(NAME); + } + + public static class NodesStatsRequest extends BaseNodesRequest { + + public NodesStatsRequest() { + super(Strings.EMPTY_ARRAY); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } + } + + public static class NodesStatsResponse extends BaseNodesResponse implements ToXContentObject { + + private static final String TOTAL_FIELD_NAME = "total"; + + public int getTotalRegisteredTransformCount() { + int totalRegisteredTransformCount = 0; + for (var nodeResponse : getNodes()) { + totalRegisteredTransformCount += nodeResponse.schedulerStats().registeredTransformCount(); + } + return totalRegisteredTransformCount; + } + + public NodesStatsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + public RestStatus status() { + return this.hasFailures() ? 
+ + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + for (var nodeEntry : getNodesMap().entrySet()) { + String nodeName = nodeEntry.getKey(); + NodeStatsResponse nodeResponse = nodeEntry.getValue(); + builder.field(nodeName); + nodeResponse.toXContent(builder, params); + } + builder.startObject(TOTAL_FIELD_NAME); + builder.startObject(SCHEDULER_STATS_FIELD_NAME); + builder.field(REGISTERED_TRANSFORM_COUNT_FIELD_NAME, getTotalRegisteredTransformCount()); + builder.endObject(); + builder.endObject(); + return builder.endObject(); + } + + @Override + protected List<NodeStatsResponse> readNodesFrom(StreamInput in) throws IOException { + return TransportAction.localOnly(); + } + + @Override + protected void writeNodesTo(StreamOutput out, List<NodeStatsResponse> nodes) throws IOException { + TransportAction.localOnly(); + } + } + + public static class NodeStatsRequest extends TransportRequest { + + public NodeStatsRequest() {} + + public NodeStatsRequest(StreamInput in) throws IOException { + super(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + } + } + + public static class NodeStatsResponse extends BaseNodeResponse implements ToXContentObject { + + private final TransformSchedulerStats schedulerStats; + + public NodeStatsResponse(DiscoveryNode node, TransformSchedulerStats schedulerStats) { + super(node); + this.schedulerStats = schedulerStats; + } + + public NodeStatsResponse(StreamInput in) throws IOException { + super(in); + this.schedulerStats = in.readOptionalWriteable(TransformSchedulerStats::new); + } + + TransformSchedulerStats schedulerStats() { + return schedulerStats; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(schedulerStats); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(SCHEDULER_STATS_FIELD_NAME, schedulerStats); + return builder.endObject(); + } + } +}
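Note the serialization hooks above: only the per-node NodeStatsRequest/NodeStatsResponse ever cross the wire, so the nodes-level request and response delegate their (de)serialization to TransportAction.localOnly(), which in this codebase signals a path that should never be reached. Conceptually, a sketch rather than the actual implementation:

    // Reaching a localOnly() hook would mean a message that is only ever used on the
    // coordinating node was about to be serialized, which is a programming error.
    static <T> T localOnlySketch() {
        throw new UnsupportedOperationException("never serialized");
    }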
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java new file mode 100644 index 0000000000000..ab6e9d587cb8d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformSchedulerStats.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.transforms; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public record TransformSchedulerStats(int registeredTransformCount, String peekTransformName) implements ToXContent, Writeable { + + public static final String REGISTERED_TRANSFORM_COUNT_FIELD_NAME = "registered_transform_count"; + public static final String PEEK_TRANSFORM_FIELD_NAME = "peek_transform"; + + public TransformSchedulerStats(StreamInput in) throws IOException { + this(in.readVInt(), in.readOptionalString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.registeredTransformCount); + out.writeOptionalString(this.peekTransformName); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(REGISTERED_TRANSFORM_COUNT_FIELD_NAME, this.registeredTransformCount); + builder.field(PEEK_TRANSFORM_FIELD_NAME, this.peekTransformName); + return builder.endObject(); + } +}
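TransformSchedulerStats is a record, so it gets value-based equals() for free, and its StreamInput constructor mirrors writeTo's field order (writeVInt, then writeOptionalString). A minimal wire round-trip sketch using the BytesStreamOutput idiom common in this repository's serialization tests:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    static TransformSchedulerStats roundTrip(TransformSchedulerStats original) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out); // writeVInt + writeOptionalString
            try (StreamInput in = out.bytes().streamInput()) {
                return new TransformSchedulerStats(in); // reads back in the same order
            }
        }
    }
    // roundTrip(new TransformSchedulerStats(5, "my-transform")) equals its input.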
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java new file mode 100644 index 0000000000000..1b9ca54b394b6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.utils; + +public class FloatConversionUtils { + + public static float[] floatArrayOf(double[] doublesArray) { + var floatArray = new float[doublesArray.length]; + for (int i = 0; i < doublesArray.length; i++) { + floatArray[i] = (float) doublesArray[i]; + } + return floatArray; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckNoDataStreamWriteIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckNoDataStreamWriteIndexStepTests.java index 33d571fbe8599..e0957239e33a8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckNoDataStreamWriteIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckNoDataStreamWriteIndexStepTests.java @@ -65,29 +65,45 @@ public void testStepCompleteIfIndexIsNotPartOfDataStream() { public void testStepIncompleteIfIndexIsTheDataStreamWriteIndex() { String dataStreamName = randomAlphaOfLength(10); - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + long ts = System.currentTimeMillis(); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + String failureIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); String policyName = "test-ilm-policy"; IndexMetadata indexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( - Metadata.builder().put(indexMetadata, true).put(newInstance(dataStreamName, List.of(indexMetadata.getIndex()))).build() + Metadata.builder() + .put(indexMetadata, true) + .put(failureIndexMetadata, true) + .put(newInstance(dataStreamName, List.of(indexMetadata.getIndex()), List.of(failureIndexMetadata.getIndex()))) + .build() ) .build(); - ClusterStateWaitStep.Result result = createRandomInstance().isConditionMet(indexMetadata.getIndex(), clusterState); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? failureIndexMetadata : indexMetadata; + String expectedIndexName = indexToOperateOn.getIndex().getName(); + ClusterStateWaitStep.Result result = createRandomInstance().isConditionMet(indexToOperateOn.getIndex(), clusterState); assertThat(result.isComplete(), is(false)); SingleMessageFieldInfo info = (SingleMessageFieldInfo) result.getInfomationContext(); assertThat( info.getMessage(), is( "index [" - + indexName - + "] is the write index for data stream [" + + expectedIndexName + + "] is the " + + (useFailureStore ?
"failure store " : "") + + "write index for data stream [" + dataStreamName + "], " + "pausing ILM execution of lifecycle [" @@ -100,33 +116,51 @@ public void testStepIncompleteIfIndexIsTheDataStreamWriteIndex() { public void testStepCompleteIfPartOfDataStreamButNotWriteIndex() { String dataStreamName = randomAlphaOfLength(10); - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + long ts = System.currentTimeMillis(); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + String failureIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); String policyName = "test-ilm-policy"; IndexMetadata indexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); - String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2); + String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts); + String failureStoreWriteIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); IndexMetadata writeIndexMetadata = IndexMetadata.builder(writeIndexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureStoreWriteIndexMetadata = IndexMetadata.builder(failureStoreWriteIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); List backingIndices = List.of(indexMetadata.getIndex(), writeIndexMetadata.getIndex()); + List failureIndices = List.of(failureIndexMetadata.getIndex(), failureStoreWriteIndexMetadata.getIndex()); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( Metadata.builder() .put(indexMetadata, true) .put(writeIndexMetadata, true) - .put(newInstance(dataStreamName, backingIndices)) + .put(failureIndexMetadata, true) + .put(failureStoreWriteIndexMetadata, true) + .put(newInstance(dataStreamName, backingIndices, failureIndices)) .build() ) .build(); - ClusterStateWaitStep.Result result = createRandomInstance().isConditionMet(indexMetadata.getIndex(), clusterState); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? 
failureIndexMetadata : indexMetadata; + ClusterStateWaitStep.Result result = createRandomInstance().isConditionMet(indexToOperateOn.getIndex(), clusterState); assertThat(result.isComplete(), is(true)); assertThat(result.getInfomationContext(), is(nullValue())); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java index 7445e82da3ecf..af4dc67d5dcbd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/DeleteStepTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; @@ -130,10 +131,11 @@ public void testPerformActionCallsFailureListenerIfIndexIsTheDataStreamWriteInde String policyName = "test-ilm-policy"; String dataStreamName = randomAlphaOfLength(10); + long ts = System.currentTimeMillis(); IndexMetadata index1; { - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); index1 = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) @@ -142,25 +144,258 @@ public void testPerformActionCallsFailureListenerIfIndexIsTheDataStreamWriteInde } IndexMetadata sourceIndexMetadata; { - - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts); sourceIndexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); } + IndexMetadata failureIndex1; + { + String indexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); + failureIndex1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + IndexMetadata failureSourceIndexMetadata; + { + String indexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); + failureSourceIndexMetadata = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } DataStream dataStream = DataStreamTestHelper.newInstance( dataStreamName, - List.of(index1.getIndex(), sourceIndexMetadata.getIndex()) + List.of(index1.getIndex(), sourceIndexMetadata.getIndex()), + List.of(failureIndex1.getIndex(), failureSourceIndexMetadata.getIndex()) + ); + ClusterState clusterState = ClusterState.builder(emptyClusterState()) + .metadata( + Metadata.builder() + .put(index1, false) + .put(sourceIndexMetadata, false) + .put(failureIndex1, false) + .put(failureSourceIndexMetadata, false) + .put(dataStream) + .build() + ) + .build(); + + AtomicBoolean listenerCalled 
= new AtomicBoolean(false); + final boolean useFailureStore = randomBoolean(); + final IndexMetadata indexToOperateOn = useFailureStore ? failureSourceIndexMetadata : sourceIndexMetadata; + createRandomInstance().performDuringNoSnapshot(indexToOperateOn, clusterState, new ActionListener<>() { + @Override + public void onResponse(Void complete) { + listenerCalled.set(true); + fail("unexpected listener callback"); + } + + @Override + public void onFailure(Exception e) { + listenerCalled.set(true); + assertThat( + e.getMessage(), + is( + "index [" + + indexToOperateOn.getIndex().getName() + + "] is the " + + (useFailureStore ? "failure store " : "") + + "write index for data stream [" + + dataStreamName + + "]. stopping execution of lifecycle [test-ilm-policy] as a data stream's write index cannot be deleted. " + + "manually rolling over the index will resume the execution of the policy as the index will not be the " + + "data stream's write index anymore" + ) + ); + } + }); + + assertThat(listenerCalled.get(), is(true)); + } + + public void testDeleteWorksIfWriteIndexIsTheOnlyIndexInDataStream() throws Exception { + String policyName = "test-ilm-policy"; + String dataStreamName = randomAlphaOfLength(10); + long ts = System.currentTimeMillis(); + + // Single backing index + IndexMetadata index1; + { + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + index1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + + DataStream dataStream = DataStreamTestHelper.newInstance(dataStreamName, List.of(index1.getIndex()), List.of()); + + ClusterState clusterState = ClusterState.builder(emptyClusterState()) + .metadata(Metadata.builder().put(index1, false).put(dataStream).build()) + .build(); + + Mockito.doAnswer(invocation -> { + DeleteDataStreamAction.Request request = (DeleteDataStreamAction.Request) invocation.getArguments()[1]; + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + assertNotNull(request); + assertEquals(1, request.getNames().length); + assertEquals(dataStreamName, request.getNames()[0]); + listener.onResponse(null); + return null; + }).when(client).execute(any(), any(), any()); + + // Try on the normal data stream - It should delete the data stream + DeleteStep step = createRandomInstance(); + PlainActionFuture.get(f -> step.performAction(index1, clusterState, null, f)); + + Mockito.verify(client, Mockito.only()).execute(any(), any(), any()); + Mockito.verify(adminClient, Mockito.never()).indices(); + Mockito.verify(indicesClient, Mockito.never()).delete(any(), any()); + } + + public void testDeleteWorksIfWriteIndexIsTheOnlyIndexInDataStreamWithFailureStore() throws Exception { + String policyName = "test-ilm-policy"; + String dataStreamName = randomAlphaOfLength(10); + long ts = System.currentTimeMillis(); + + // Single backing index + IndexMetadata index1; + { + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + index1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + + // Multiple failure indices + IndexMetadata failureIndex1; + { + String indexName = 
DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); + failureIndex1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + IndexMetadata failureSourceIndexMetadata; + { + String indexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); + failureSourceIndexMetadata = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + + DataStream dataStreamWithFailureIndices = DataStreamTestHelper.newInstance( + dataStreamName, + List.of(index1.getIndex()), + List.of(failureIndex1.getIndex(), failureSourceIndexMetadata.getIndex()) ); + ClusterState clusterState = ClusterState.builder(emptyClusterState()) - .metadata(Metadata.builder().put(index1, false).put(sourceIndexMetadata, false).put(dataStream).build()) + .metadata( + Metadata.builder() + .put(index1, false) + .put(failureIndex1, false) + .put(failureSourceIndexMetadata, false) + .put(dataStreamWithFailureIndices) + .build() + ) + .build(); + + Mockito.doAnswer(invocation -> { + DeleteDataStreamAction.Request request = (DeleteDataStreamAction.Request) invocation.getArguments()[1]; + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + assertNotNull(request); + assertEquals(1, request.getNames().length); + assertEquals(dataStreamName, request.getNames()[0]); + listener.onResponse(null); + return null; + }).when(client).execute(any(), any(), any()); + + // Again, the deletion should work since the data stream would be fully deleted anyway if the failure store were disabled. 
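Taken together, these DeleteStep tests pin down a small decision table: a non-write index can be deleted; the backing write index can only be deleted when it is the stream's sole backing index, in which case the whole data stream is deleted via DeleteDataStreamAction; and a failure-store write index can never be deleted directly. A hypothetical condensation; the real logic lives in DeleteStep#performDuringNoSnapshot and is not shown in this diff:

    // Sketch of the behavior under test; method and parameter names are invented.
    static String decide(boolean isWriteIndex, boolean isFailureStoreIndex, int backingIndexCount) {
        if (isWriteIndex == false) {
            return "delete the index";
        }
        if (isFailureStoreIndex == false && backingIndexCount == 1) {
            return "delete the entire data stream"; // nothing useful would remain anyway
        }
        return "fail and wait for a manual or automated rollover";
    }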
+ DeleteStep step = createRandomInstance(); + PlainActionFuture.get(f -> step.performAction(index1, clusterState, null, f)); + + Mockito.verify(client, Mockito.only()).execute(any(), any(), any()); + Mockito.verify(adminClient, Mockito.never()).indices(); + Mockito.verify(indicesClient, Mockito.never()).delete(any(), any()); + } + + public void testDeletingFailureStoreWriteIndexOnDataStreamWithSingleBackingIndex() { + doThrow( + new IllegalStateException( + "the client must not be called in this test as we should fail in the step validation phase before we call the delete API" + ) + ).when(indicesClient).delete(any(DeleteIndexRequest.class), anyActionListener()); + + String policyName = "test-ilm-policy"; + String dataStreamName = randomAlphaOfLength(10); + long ts = System.currentTimeMillis(); + + // Single backing index + IndexMetadata index1; + { + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + index1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + + // Multiple failure indices + IndexMetadata failureIndex1; + { + String indexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); + failureIndex1 = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + IndexMetadata failureSourceIndexMetadata; + { + String indexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); + failureSourceIndexMetadata = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + } + + DataStream dataStreamWithFailureIndices = DataStreamTestHelper.newInstance( + dataStreamName, + List.of(index1.getIndex()), + List.of(failureIndex1.getIndex(), failureSourceIndexMetadata.getIndex()) + ); + + ClusterState clusterState = ClusterState.builder(emptyClusterState()) + .metadata( + Metadata.builder() + .put(index1, false) + .put(failureIndex1, false) + .put(failureSourceIndexMetadata, false) + .put(dataStreamWithFailureIndices) + .build() + ) .build(); AtomicBoolean listenerCalled = new AtomicBoolean(false); - createRandomInstance().performDuringNoSnapshot(sourceIndexMetadata, clusterState, new ActionListener<>() { + createRandomInstance().performDuringNoSnapshot(failureSourceIndexMetadata, clusterState, new ActionListener<>() { @Override public void onResponse(Void complete) { listenerCalled.set(true); @@ -174,8 +409,8 @@ public void onFailure(Exception e) { e.getMessage(), is( "index [" - + sourceIndexMetadata.getIndex().getName() - + "] is the write index for data stream [" + + failureSourceIndexMetadata.getIndex().getName() + + "] is the failure store write index for data stream [" + dataStreamName + "]. stopping execution of lifecycle [test-ilm-policy] as a data stream's write index cannot be deleted. 
" + "manually rolling over the index will resume the execution of the policy as the index will not be the " diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java index 2a49be703574b..a3318e68305c6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java @@ -68,55 +68,85 @@ public void testPerformActionThrowsExceptionIfIndexIsNotPartOfDataStream() { public void testPerformActionThrowsExceptionIfIndexIsTheDataStreamWriteIndex() { String dataStreamName = randomAlphaOfLength(10); - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + long ts = System.currentTimeMillis(); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + String failureIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); String policyName = "test-ilm-policy"; IndexMetadata sourceIndexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureSourceIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( Metadata.builder() .put(sourceIndexMetadata, true) - .put(newInstance(dataStreamName, List.of(sourceIndexMetadata.getIndex()))) + .put(failureSourceIndexMetadata, true) + .put( + newInstance(dataStreamName, List.of(sourceIndexMetadata.getIndex()), List.of(failureSourceIndexMetadata.getIndex())) + ) .build() ) .build(); - expectThrows(IllegalStateException.class, () -> createRandomInstance().performAction(sourceIndexMetadata.getIndex(), clusterState)); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? 
failureSourceIndexMetadata : sourceIndexMetadata; + expectThrows(IllegalStateException.class, () -> createRandomInstance().performAction(indexToOperateOn.getIndex(), clusterState)); } public void testPerformActionThrowsExceptionIfTargetIndexIsMissing() { String dataStreamName = randomAlphaOfLength(10); - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + long ts = System.currentTimeMillis(); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + String failureIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); String policyName = "test-ilm-policy"; IndexMetadata sourceIndexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureSourceIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); - String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2); + String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts); + String failureWriteIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); IndexMetadata writeIndexMetadata = IndexMetadata.builder(writeIndexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureWriteIndexMetadata = IndexMetadata.builder(failureWriteIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); List backingIndices = List.of(sourceIndexMetadata.getIndex(), writeIndexMetadata.getIndex()); + List failureIndices = List.of(failureSourceIndexMetadata.getIndex(), failureWriteIndexMetadata.getIndex()); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( Metadata.builder() .put(sourceIndexMetadata, true) .put(writeIndexMetadata, true) - .put(newInstance(dataStreamName, backingIndices)) + .put(failureSourceIndexMetadata, true) + .put(failureWriteIndexMetadata, true) + .put(newInstance(dataStreamName, backingIndices, failureIndices)) .build() ) .build(); - expectThrows(IllegalStateException.class, () -> createRandomInstance().performAction(sourceIndexMetadata.getIndex(), clusterState)); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? 
failureSourceIndexMetadata : sourceIndexMetadata; + expectThrows(IllegalStateException.class, () -> createRandomInstance().performAction(indexToOperateOn.getIndex(), clusterState)); } public void testPerformActionIsNoOpIfIndexIsMissing() { @@ -129,23 +159,39 @@ public void testPerformActionIsNoOpIfIndexIsMissing() { public void testPerformAction() { String dataStreamName = randomAlphaOfLength(10); - String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + long ts = System.currentTimeMillis(); + String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts); + String failureIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts); String policyName = "test-ilm-policy"; IndexMetadata sourceIndexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureSourceIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); - String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2); + String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts); + String failureWriteIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); IndexMetadata writeIndexMetadata = IndexMetadata.builder(writeIndexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureWriteIndexMetadata = IndexMetadata.builder(failureWriteIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + + boolean useFailureStore = randomBoolean(); + String indexNameToUse = useFailureStore ? 
failureIndexName : indexName; String indexPrefix = "test-prefix-"; - String targetIndex = indexPrefix + indexName; + String targetIndex = indexPrefix + indexNameToUse; IndexMetadata targetIndexMetadata = IndexMetadata.builder(targetIndex) .settings(settings(IndexVersion.current())) @@ -154,12 +200,15 @@ public void testPerformAction() { .build(); List backingIndices = List.of(sourceIndexMetadata.getIndex(), writeIndexMetadata.getIndex()); + List failureIndices = List.of(failureSourceIndexMetadata.getIndex(), failureWriteIndexMetadata.getIndex()); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( Metadata.builder() .put(sourceIndexMetadata, true) .put(writeIndexMetadata, true) - .put(newInstance(dataStreamName, backingIndices)) + .put(failureSourceIndexMetadata, true) + .put(failureWriteIndexMetadata, true) + .put(newInstance(dataStreamName, backingIndices, failureIndices)) .put(targetIndexMetadata, true) .build() ) @@ -168,12 +217,16 @@ public void testPerformAction() { ReplaceDataStreamBackingIndexStep replaceSourceIndexStep = new ReplaceDataStreamBackingIndexStep( randomStepKey(), randomStepKey(), - (index, state) -> indexPrefix + index + (index, state) -> indexPrefix + indexNameToUse ); - ClusterState newState = replaceSourceIndexStep.performAction(sourceIndexMetadata.getIndex(), clusterState); + IndexMetadata indexToOperateOn = useFailureStore ? failureSourceIndexMetadata : sourceIndexMetadata; + ClusterState newState = replaceSourceIndexStep.performAction(indexToOperateOn.getIndex(), clusterState); DataStream updatedDataStream = newState.metadata().dataStreams().get(dataStreamName); - assertThat(updatedDataStream.getIndices().size(), is(2)); - assertThat(updatedDataStream.getIndices().get(0), is(targetIndexMetadata.getIndex())); + DataStream.DataStreamIndices resultIndices = useFailureStore + ? 
updatedDataStream.getFailureIndices() + : updatedDataStream.getBackingIndices(); + assertThat(resultIndices.getIndices().size(), is(2)); + assertThat(resultIndices.getIndices().get(0), is(targetIndexMetadata.getIndex())); } /** @@ -181,23 +234,38 @@ public void testPerformAction() { */ public void testPerformActionSameOriginalTargetError() { String dataStreamName = randomAlphaOfLength(10); - String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2); + long ts = System.currentTimeMillis(); + String writeIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts); + String failureWriteIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts); String indexName = writeIndexName; + String failureIndexName = failureWriteIndexName; String policyName = "test-ilm-policy"; IndexMetadata sourceIndexMetadata = IndexMetadata.builder(indexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureSourceIndexMetadata = IndexMetadata.builder(failureIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); IndexMetadata writeIndexMetadata = IndexMetadata.builder(writeIndexName) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); + IndexMetadata failureWriteIndexMetadata = IndexMetadata.builder(failureWriteIndexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); String indexPrefix = "test-prefix-"; - String targetIndex = indexPrefix + indexName; + boolean useFailureStore = randomBoolean(); + String indexNameToUse = useFailureStore ? failureIndexName : indexName; + String targetIndex = indexPrefix + indexNameToUse; IndexMetadata targetIndexMetadata = IndexMetadata.builder(targetIndex) .settings(settings(IndexVersion.current())) @@ -206,12 +274,15 @@ public void testPerformActionSameOriginalTargetError() { .build(); List backingIndices = List.of(writeIndexMetadata.getIndex()); + List failureIndices = List.of(failureWriteIndexMetadata.getIndex()); ClusterState clusterState = ClusterState.builder(emptyClusterState()) .metadata( Metadata.builder() .put(sourceIndexMetadata, true) .put(writeIndexMetadata, true) - .put(newInstance(dataStreamName, backingIndices)) + .put(failureSourceIndexMetadata, true) + .put(failureWriteIndexMetadata, true) + .put(newInstance(dataStreamName, backingIndices, failureIndices)) .put(targetIndexMetadata, true) .build() ) @@ -222,14 +293,17 @@ public void testPerformActionSameOriginalTargetError() { randomStepKey(), (index, state) -> indexPrefix + index ); + IndexMetadata indexToOperateOn = useFailureStore ? failureSourceIndexMetadata : sourceIndexMetadata; IllegalStateException ex = expectThrows( IllegalStateException.class, - () -> replaceSourceIndexStep.performAction(sourceIndexMetadata.getIndex(), clusterState) + () -> replaceSourceIndexStep.performAction(indexToOperateOn.getIndex(), clusterState) ); assertEquals( "index [" - + writeIndexName - + "] is the write index for data stream [" + + indexNameToUse + + "] is the " + + (useFailureStore ? 
"failure store " : "") + + "write index for data stream [" + dataStreamName + "], pausing ILM execution of lifecycle [test-ilm-policy] until this index is no longer the write index for the data " + "stream via manual or automated rollover", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java index 1fcfc1fb287c4..f25a862362540 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java @@ -96,7 +96,13 @@ public void testPerformAction() throws Exception { public void testPerformActionOnDataStream() throws Exception { String dataStreamName = "test-datastream"; - IndexMetadata indexMetadata = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + long ts = System.currentTimeMillis(); + IndexMetadata indexMetadata = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + IndexMetadata failureIndexMetadata = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) @@ -107,9 +113,16 @@ public void testPerformActionOnDataStream() throws Exception { mockClientRolloverCall(dataStreamName); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .metadata(Metadata.builder().put(newInstance(dataStreamName, List.of(indexMetadata.getIndex()))).put(indexMetadata, true)) + .metadata( + Metadata.builder() + .put(newInstance(dataStreamName, List.of(indexMetadata.getIndex()), List.of(failureIndexMetadata.getIndex()))) + .put(indexMetadata, true) + .put(failureIndexMetadata, true) + ) .build(); - PlainActionFuture.get(f -> step.performAction(indexMetadata, clusterState, null, f)); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? 
failureIndexMetadata : indexMetadata; + PlainActionFuture.get(f -> step.performAction(indexToOperateOn, clusterState, null, f)); Mockito.verify(client, Mockito.only()).admin(); Mockito.verify(adminClient, Mockito.only()).indices(); @@ -118,13 +131,24 @@ public void testPerformActionOnDataStream() throws Exception { public void testSkipRolloverIfDataStreamIsAlreadyRolledOver() throws Exception { String dataStreamName = "test-datastream"; - IndexMetadata firstGenerationIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + long ts = System.currentTimeMillis(); + IndexMetadata firstGenerationIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + IndexMetadata failureFirstGenerationIndex = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); - IndexMetadata writeIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2)) + IndexMetadata writeIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + IndexMetadata failureWriteIndex = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) @@ -136,10 +160,20 @@ public void testSkipRolloverIfDataStreamIsAlreadyRolledOver() throws Exception { Metadata.builder() .put(firstGenerationIndex, true) .put(writeIndex, true) - .put(newInstance(dataStreamName, List.of(firstGenerationIndex.getIndex(), writeIndex.getIndex()))) + .put(failureFirstGenerationIndex, true) + .put(failureWriteIndex, true) + .put( + newInstance( + dataStreamName, + List.of(firstGenerationIndex.getIndex(), writeIndex.getIndex()), + List.of(failureFirstGenerationIndex.getIndex(), failureWriteIndex.getIndex()) + ) + ) ) .build(); - PlainActionFuture.get(f -> step.performAction(firstGenerationIndex, clusterState, null, f)); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? 
failureFirstGenerationIndex : firstGenerationIndex; + PlainActionFuture.get(f -> step.performAction(indexToOperateOn, clusterState, null, f)); verifyNoMoreInteractions(client); verifyNoMoreInteractions(adminClient); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsTests.java index d9fd2d8a2247e..f5f36781e011b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsTests.java @@ -170,13 +170,24 @@ public void testResultEvaluatedOnOnlyIndexTheAliasPointsToIfWriteIndexIsNull() { public void testResultEvaluatedOnDataStream() throws IOException { String dataStreamName = "test-datastream"; - IndexMetadata originalIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + long ts = System.currentTimeMillis(); + IndexMetadata originalIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + IndexMetadata failureOriginalIndexMeta = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); - IndexMetadata rolledIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2)) + IndexMetadata rolledIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts)) + .settings(settings(IndexVersion.current()).put("index.write.wait_for_active_shards", "3")) + .numberOfShards(1) + .numberOfReplicas(3) + .build(); + IndexMetadata failureRolledIndexMeta = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts)) .settings(settings(IndexVersion.current()).put("index.write.wait_for_active_shards", "3")) .numberOfShards(1) .numberOfReplicas(3) @@ -186,28 +197,53 @@ public void testResultEvaluatedOnDataStream() throws IOException { ShardRoutingRoleStrategy.NO_SHARD_CREATION, rolledIndexMeta.getIndex() ); + IndexRoutingTable.Builder failureRoutingTable = new IndexRoutingTable.Builder( + ShardRoutingRoleStrategy.NO_SHARD_CREATION, + failureRolledIndexMeta.getIndex() + ); routingTable.addShard( TestShardRouting.newShardRouting(rolledIndexMeta.getIndex().getName(), 0, "node", null, true, ShardRoutingState.STARTED) ); routingTable.addShard( TestShardRouting.newShardRouting(rolledIndexMeta.getIndex().getName(), 0, "node2", null, false, ShardRoutingState.STARTED) ); + failureRoutingTable.addShard( + TestShardRouting.newShardRouting(failureRolledIndexMeta.getIndex().getName(), 0, "node", null, true, ShardRoutingState.STARTED) + ); + failureRoutingTable.addShard( + TestShardRouting.newShardRouting( + failureRolledIndexMeta.getIndex().getName(), + 0, + "node2", + null, + false, + ShardRoutingState.STARTED + ) + ); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .metadata( Metadata.builder() .put( - DataStreamTestHelper.newInstance(dataStreamName, List.of(originalIndexMeta.getIndex(), rolledIndexMeta.getIndex())) + DataStreamTestHelper.newInstance( + dataStreamName, + List.of(originalIndexMeta.getIndex(), rolledIndexMeta.getIndex()), + 
List.of(failureOriginalIndexMeta.getIndex(), failureRolledIndexMeta.getIndex()) + ) ) .put(originalIndexMeta, true) .put(rolledIndexMeta, true) + .put(failureOriginalIndexMeta, true) + .put(failureRolledIndexMeta, true) ) - .routingTable(RoutingTable.builder().add(routingTable.build()).build()) + .routingTable(RoutingTable.builder().add(routingTable.build()).add(failureRoutingTable.build()).build()) .build(); WaitForActiveShardsStep waitForActiveShardsStep = createRandomInstance(); - ClusterStateWaitStep.Result result = waitForActiveShardsStep.isConditionMet(originalIndexMeta.getIndex(), clusterState); + boolean useFailureStore = randomBoolean(); + IndexMetadata indexToOperateOn = useFailureStore ? failureOriginalIndexMeta : originalIndexMeta; + ClusterStateWaitStep.Result result = waitForActiveShardsStep.isConditionMet(indexToOperateOn.getIndex(), clusterState); assertThat(result.isComplete(), is(false)); XContentBuilder expected = new WaitForActiveShardsStep.ActiveShardsInfo(2, "3", false).toXContent( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java index 2d39d093d149e..15958e9396d81 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStepTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContentObject; +import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import java.util.Collections; @@ -47,6 +48,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; @@ -254,7 +256,14 @@ public void onFailure(Exception e) { public void testEvaluateConditionOnDataStreamTarget() { String dataStreamName = "test-datastream"; - IndexMetadata indexMetadata = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + long ts = System.currentTimeMillis(); + boolean failureStoreIndex = randomBoolean(); + IndexMetadata indexMetadata = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + IndexMetadata failureStoreMetadata = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) @@ -267,9 +276,17 @@ public void testEvaluateConditionOnDataStreamTarget() { SetOnce conditionsMet = new SetOnce<>(); Metadata metadata = Metadata.builder() .put(indexMetadata, true) - .put(DataStreamTestHelper.newInstance(dataStreamName, List.of(indexMetadata.getIndex()))) + .put(failureStoreMetadata, true) + .put( + DataStreamTestHelper.newInstance( + dataStreamName, + List.of(indexMetadata.getIndex()), + List.of(failureStoreMetadata.getIndex()) + ) + ) .build(); - step.evaluateCondition(metadata, indexMetadata.getIndex(), new AsyncWaitStep.Listener() { + IndexMetadata indexToOperateOn = failureStoreIndex ? 
failureStoreMetadata : indexMetadata; + step.evaluateCondition(metadata, indexToOperateOn.getIndex(), new AsyncWaitStep.Listener() { @Override public void onResponse(boolean complete, ToXContentObject infomationContext) { @@ -286,18 +303,38 @@ public void onFailure(Exception e) { verify(client, Mockito.only()).admin(); verify(adminClient, Mockito.only()).indices(); - verify(indicesClient, Mockito.only()).rolloverIndex(Mockito.any(), Mockito.any()); + + ArgumentCaptor<RolloverRequest> requestCaptor = ArgumentCaptor.forClass(RolloverRequest.class); + verify(indicesClient, Mockito.only()).rolloverIndex(requestCaptor.capture(), Mockito.any()); + + RolloverRequest request = requestCaptor.getValue(); + assertThat(request.indicesOptions().failureStoreOptions().includeFailureIndices(), equalTo(failureStoreIndex)); + assertThat(request.indicesOptions().failureStoreOptions().includeRegularIndices(), not(equalTo(failureStoreIndex))); } public void testSkipRolloverIfDataStreamIsAlreadyRolledOver() { String dataStreamName = "test-datastream"; - IndexMetadata firstGenerationIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + long ts = System.currentTimeMillis(); + boolean failureStoreIndex = randomBoolean(); + IndexMetadata firstGenerationIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); - IndexMetadata writeIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2)) + IndexMetadata writeIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 2, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + + IndexMetadata firstGenerationFailureIndex = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 1, ts)) + .settings(settings(IndexVersion.current())) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)) + .build(); + + IndexMetadata writeFailureIndex = IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, 2, ts)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) .build(); @@ -308,9 +345,18 @@ public void testSkipRolloverIfDataStreamIsAlreadyRolledOver() { Metadata metadata = Metadata.builder() .put(firstGenerationIndex, true) .put(writeIndex, true) - .put(DataStreamTestHelper.newInstance(dataStreamName, List.of(firstGenerationIndex.getIndex(), writeIndex.getIndex()))) + .put(firstGenerationFailureIndex, true) + .put(writeFailureIndex, true) + .put( + DataStreamTestHelper.newInstance( + dataStreamName, + List.of(firstGenerationIndex.getIndex(), writeIndex.getIndex()), + List.of(firstGenerationFailureIndex.getIndex(), writeFailureIndex.getIndex()) + ) + ) .build(); - step.evaluateCondition(metadata, firstGenerationIndex.getIndex(), new AsyncWaitStep.Listener() { + IndexMetadata indexToOperateOn = failureStoreIndex ? firstGenerationFailureIndex : firstGenerationIndex;
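The two captured-request assertions above encode an invariant worth stating plainly: a rollover triggered from a failure-store index must target only the stream's failure indices, and one triggered from a backing index only the regular indices; the two selectors are always opposites. As a sketch, with names taken from the assertions:

    // Invariant checked via the ArgumentCaptor above.
    static void assertSelectorsOpposite(boolean failureStoreIndex, boolean includeFailureIndices, boolean includeRegularIndices) {
        assert includeFailureIndices == failureStoreIndex;
        assert includeRegularIndices == (failureStoreIndex == false); // never both, never neither
    }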
+ step.evaluateCondition(metadata, indexToOperateOn.getIndex(), new AsyncWaitStep.Listener() { @Override public void onResponse(boolean complete, ToXContentObject infomationContext) { @@ -665,7 +711,7 @@ public void testCreateRolloverRequestRolloverOnlyIfHasDocuments() { String rolloverTarget = randomAlphaOfLength(5); TimeValue masterTimeout = randomPositiveTimeValue(); - RolloverRequest request = step.createRolloverRequest(rolloverTarget, masterTimeout, rolloverOnlyIfHasDocuments); + RolloverRequest request = step.createRolloverRequest(rolloverTarget, masterTimeout, rolloverOnlyIfHasDocuments, false); assertThat(request.getRolloverTarget(), is(rolloverTarget)); assertThat(request.masterNodeTimeout(), is(masterTimeout)); @@ -704,7 +750,7 @@ public void testCreateRolloverRequestRolloverBeyondMaximumPrimaryShardDocCount() c.getMinDocs(), c.getMinPrimaryShardDocs() ); - RolloverRequest request = step.createRolloverRequest(rolloverTarget, masterTimeout, true); + RolloverRequest request = step.createRolloverRequest(rolloverTarget, masterTimeout, true, false); assertThat(request.getRolloverTarget(), is(rolloverTarget)); assertThat(request.masterNodeTimeout(), is(masterTimeout)); assertThat(request.isDryRun(), is(true)); // it's always a dry_run @@ -725,7 +771,7 @@ public void testCreateRolloverRequestRolloverBeyondMaximumPrimaryShardDocCount() c.getMinDocs(), c.getMinPrimaryShardDocs() ); - request = step.createRolloverRequest(rolloverTarget, masterTimeout, true); + request = step.createRolloverRequest(rolloverTarget, masterTimeout, true, false); assertThat(request.getRolloverTarget(), is(rolloverTarget)); assertThat(request.masterNodeTimeout(), is(masterTimeout)); assertThat(request.isDryRun(), is(true)); // it's always a dry_run @@ -747,7 +793,7 @@ public void testCreateRolloverRequestRolloverBeyondMaximumPrimaryShardDocCount() c.getMinDocs(), c.getMinPrimaryShardDocs() ); - request = step.createRolloverRequest(rolloverTarget, masterTimeout, true); + request = step.createRolloverRequest(rolloverTarget, masterTimeout, true, false); assertThat(request.getRolloverTarget(), is(rolloverTarget)); assertThat(request.masterNodeTimeout(), is(masterTimeout)); assertThat(request.isDryRun(), is(true)); // it's always a dry_run diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java new file mode 100644 index 0000000000000..83678cd030bc2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/InferenceChunkedTextEmbeddingFloatResultsTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.INFERENCE; +import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.TEXT; + +public class InferenceChunkedTextEmbeddingFloatResultsTests extends ESTestCase { + /** + * Similar to {@link org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults#asMap()} but it converts the + * embeddings float array into a list of floats to make testing equality easier. + */ + public static Map<String, Object> asMapWithListsInsteadOfArrays(InferenceChunkedTextEmbeddingFloatResults result) { + return Map.of( + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, + result.getChunks() + .stream() + .map(InferenceChunkedTextEmbeddingFloatResultsTests::inferenceFloatEmbeddingChunkAsMapWithListsInsteadOfArrays) + .collect(Collectors.toList()) + ); + } + + /** + * Similar to {@link MlChunkedTextEmbeddingFloatResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles + * to make testing equality easier. + */ + public static Map<String, Object> inferenceFloatEmbeddingChunkAsMapWithListsInsteadOfArrays( + InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk chunk + ) { + var chunkAsList = new ArrayList<Float>(chunk.embedding().length); + for (double embedding : chunk.embedding()) { + chunkAsList.add((float) embedding); + } + var map = new HashMap<String, Object>(); + map.put(TEXT, chunk.matchedText()); + map.put(INFERENCE, chunkAsList); + return map; + } +}
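The reason these helpers convert embedding arrays into lists: Java arrays compare by identity, so maps holding float[] or double[] values never compare equal even with identical contents, which breaks assertEquals-style map comparisons in tests. For example:

    double[] a = new double[] { 1.0, 2.0 };
    double[] b = new double[] { 1.0, 2.0 };
    // a.equals(b) is false: identity equality
    // java.util.Arrays.equals(a, b) is true: element equality
    // List.of(1.0, 2.0).equals(List.of(1.0, 2.0)) is true: the value equality the converted maps rely on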
mutateInstanceForVersion(Request instance, TransportVersion ve r.setHighPriority(instance.isHighPriority()); r.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); return r; + } else if (version.before(TransportVersions.ML_CHUNK_INFERENCE_OPTION)) { + var r = new Request( + instance.getId(), + adjustedUpdate, + instance.getObjectsToInfer(), + instance.getTextInput(), + instance.getInferenceTimeout(), + instance.isPreviouslyLicensed() + ); + r.setHighPriority(instance.isHighPriority()); + r.setPrefixType(instance.getPrefixType()); + r.setChunked(false); // r.setChunked(instance.isChunked()); for the next version + return r; } return instance; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java index 4d8035864729a..87049d6bde90c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionResponseTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.ml.inference.results.ClassificationInferenceResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResults; import org.elasticsearch.xpack.core.ml.inference.results.FillMaskResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.NerResults; import org.elasticsearch.xpack.core.ml.inference.results.NerResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.PyTorchPassThroughResults; @@ -25,8 +27,6 @@ import org.elasticsearch.xpack.core.ml.inference.results.QuestionAnsweringInferenceResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResultsTests; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults; @@ -50,7 +50,7 @@ public class InferModelActionResponseTests extends AbstractWireSerializingTestCa PyTorchPassThroughResults.NAME, QuestionAnsweringInferenceResults.NAME, RegressionInferenceResults.NAME, - TextEmbeddingResults.NAME, + MlTextEmbeddingResults.NAME, TextExpansionResults.NAME, TextSimilarityInferenceResults.NAME, WarningInferenceResults.NAME @@ -87,7 +87,7 @@ private static InferenceResults randomInferenceResult(String resultType) { case PyTorchPassThroughResults.NAME -> PyTorchPassThroughResultsTests.createRandomResults(); case QuestionAnsweringInferenceResults.NAME -> QuestionAnsweringInferenceResultsTests.createRandomResults(); case RegressionInferenceResults.NAME -> RegressionInferenceResultsTests.createRandomResults(); - case TextEmbeddingResults.NAME -> TextEmbeddingResultsTests.createRandomResults(); + case MlTextEmbeddingResults.NAME -> MlTextEmbeddingResultsTests.createRandomResults(); case TextExpansionResults.NAME -> TextExpansionResultsTests.createRandomResults(); case 
TextSimilarityInferenceResults.NAME -> TextSimilarityInferenceResultsTests.createRandomResults(); case WarningInferenceResults.NAME -> WarningInferenceResultsTests.createRandomResults(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java index 4db7d05b60658..eb373080eee4a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentResponseTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; import org.junit.Before; import java.util.List; @@ -50,10 +50,10 @@ protected Writeable.Reader<InferTrainedModelDeploymentAction.Response> instanceR protected InferTrainedModelDeploymentAction.Response createTestInstance() { return new InferTrainedModelDeploymentAction.Response( List.of( - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults(), - TextEmbeddingResultsTests.createRandomResults() + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults(), + MlTextEmbeddingResultsTests.createRandomResults() ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java similarity index 57% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java index 71eff531581e2..f5db7a2863e0c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextExpansionResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceChunkedTextExpansionResultsTests.java @@ -16,10 +16,10 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ChunkedTextExpansionResultsTests extends AbstractWireSerializingTestCase<ChunkedTextExpansionResults> { +public class InferenceChunkedTextExpansionResultsTests extends AbstractWireSerializingTestCase<InferenceChunkedTextExpansionResults> { - public static ChunkedTextExpansionResults createRandomResults() { - var chunks = new ArrayList<ChunkedTextExpansionResults.ChunkedResult>(); + public static InferenceChunkedTextExpansionResults createRandomResults() { + var chunks = new ArrayList<InferenceChunkedTextExpansionResults.ChunkedResult>(); int numChunks = randomIntBetween(1, 5); for (int i = 0; i < numChunks; i++) { @@ -28,24 +28,24 @@ public static ChunkedTextExpansionResults createRandomResults() { for (int j = 0; j < numTokens; j++) { tokenWeights.add(new WeightedToken(Integer.toString(j), (float) randomDoubleBetween(0.0, 5.0, false))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); } - return new ChunkedTextExpansionResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); + return new InferenceChunkedTextExpansionResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); } @Override - protected Writeable.Reader<ChunkedTextExpansionResults> instanceReader() { - return ChunkedTextExpansionResults::new; + protected Writeable.Reader<InferenceChunkedTextExpansionResults> instanceReader() { + return InferenceChunkedTextExpansionResults::new; } @Override - protected ChunkedTextExpansionResults createTestInstance() { + protected InferenceChunkedTextExpansionResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextExpansionResults mutateInstance(ChunkedTextExpansionResults instance) throws IOException { + protected InferenceChunkedTextExpansionResults mutateInstance(InferenceChunkedTextExpansionResults instance) throws IOException { return null; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java similarity index 53% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java index 1e8f5b6a26ad2..c4d008ac77355 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/ChunkedTextEmbeddingResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlChunkedTextEmbeddingFloatResultsTests.java @@ -21,10 +21,10 @@ import static org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults.TEXT; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ChunkedTextEmbeddingResultsTests extends AbstractWireSerializingTestCase<ChunkedTextEmbeddingResults> { +public class MlChunkedTextEmbeddingFloatResultsTests extends AbstractWireSerializingTestCase<MlChunkedTextEmbeddingFloatResults> { - public static ChunkedTextEmbeddingResults createRandomResults() { - var chunks = new ArrayList<ChunkedTextEmbeddingResults.EmbeddingChunk>(); + public static MlChunkedTextEmbeddingFloatResults createRandomResults() { + var chunks = new ArrayList<MlChunkedTextEmbeddingFloatResults.EmbeddingChunk>(); int columns = randomIntBetween(5, 10); int numChunks = randomIntBetween(1, 5); @@ -33,17 +33,17 @@ public static ChunkedTextEmbeddingResults createRandomResults() { for (int j = 0; j < columns; j++) { arr[j] = randomDouble(); } - chunks.add(new ChunkedTextEmbeddingResults.EmbeddingChunk(randomAlphaOfLength(6), arr)); + chunks.add(new MlChunkedTextEmbeddingFloatResults.EmbeddingChunk(randomAlphaOfLength(6), arr)); } - return new ChunkedTextEmbeddingResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); + return new MlChunkedTextEmbeddingFloatResults(DEFAULT_RESULTS_FIELD, chunks, randomBoolean()); } /** - * Similar to {@link ChunkedTextEmbeddingResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles to - * make testing equality easier. + * Similar to {@link MlChunkedTextEmbeddingFloatResults.EmbeddingChunk#asMap()} but it converts the double array into a list of doubles + * to make testing equality easier.
*/ - public static Map<String, Object> asMapWithListsInsteadOfArrays(ChunkedTextEmbeddingResults.EmbeddingChunk chunk) { + public static Map<String, Object> asMapWithListsInsteadOfArrays(MlChunkedTextEmbeddingFloatResults.EmbeddingChunk chunk) { var map = new HashMap<String, Object>(); map.put(TEXT, chunk.matchedText()); map.put(INFERENCE, Arrays.stream(chunk.embedding()).boxed().collect(Collectors.toList())); @@ -51,20 +51,28 @@ public static Map<String, Object> asMapWithListsInsteadOfArrays(ChunkedTextEmbed } @Override - protected Writeable.Reader<ChunkedTextEmbeddingResults> instanceReader() { - return ChunkedTextEmbeddingResults::new; + protected Writeable.Reader<MlChunkedTextEmbeddingFloatResults> instanceReader() { + return MlChunkedTextEmbeddingFloatResults::new; } @Override - protected ChunkedTextEmbeddingResults createTestInstance() { + protected MlChunkedTextEmbeddingFloatResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextEmbeddingResults mutateInstance(ChunkedTextEmbeddingResults instance) throws IOException { + protected MlChunkedTextEmbeddingFloatResults mutateInstance(MlChunkedTextEmbeddingFloatResults instance) throws IOException { return switch (randomIntBetween(0, 1)) { - case 0 -> new ChunkedTextEmbeddingResults(instance.getResultsField() + "foo", instance.getChunks(), instance.isTruncated); - case 1 -> new ChunkedTextEmbeddingResults(instance.getResultsField(), instance.getChunks(), instance.isTruncated == false); + case 0 -> new MlChunkedTextEmbeddingFloatResults( + instance.getResultsField() + "foo", + instance.getChunks(), + instance.isTruncated + ); + case 1 -> new MlChunkedTextEmbeddingFloatResults( + instance.getResultsField(), + instance.getChunks(), + instance.isTruncated == false + ); default -> throw new IllegalArgumentException("unexpected case"); }; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java similarity index 68% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java index fd3ac7f8c0d12..3338609eebdc3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/MlTextEmbeddingResultsTests.java @@ -16,35 +16,35 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class TextEmbeddingResultsTests extends InferenceResultsTestCase<TextEmbeddingResults> { +public class MlTextEmbeddingResultsTests extends InferenceResultsTestCase<MlTextEmbeddingResults> { - public static TextEmbeddingResults createRandomResults() { + public static MlTextEmbeddingResults createRandomResults() { int columns = randomIntBetween(1, 10); double[] arr = new double[columns]; for (int i = 0; i < columns; i++) { arr[i] = randomDouble(); } - return new TextEmbeddingResults(DEFAULT_RESULTS_FIELD, arr, randomBoolean()); + return new MlTextEmbeddingResults(DEFAULT_RESULTS_FIELD, arr, randomBoolean()); } @Override - protected Writeable.Reader<TextEmbeddingResults> instanceReader() { - return TextEmbeddingResults::new; + protected Writeable.Reader<MlTextEmbeddingResults> instanceReader() { + return MlTextEmbeddingResults::new; } @Override - protected TextEmbeddingResults createTestInstance() { + protected MlTextEmbeddingResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingResults mutateInstance(TextEmbeddingResults instance) { + protected MlTextEmbeddingResults mutateInstance(MlTextEmbeddingResults instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } public void testAsMap() { - TextEmbeddingResults testInstance = createTestInstance(); + MlTextEmbeddingResults testInstance = createTestInstance(); Map<String, Object> asMap = testInstance.asMap(); int size = testInstance.isTruncated ? 2 : 1; assertThat(asMap.keySet(), hasSize(size)); @@ -55,7 +55,7 @@ public void testAsMap() { } @Override - void assertFieldValues(TextEmbeddingResults createdInstance, IngestDocument document, String parentField, String resultsField) { + void assertFieldValues(MlTextEmbeddingResults createdInstance, IngestDocument document, String parentField, String resultsField) { assertArrayEquals(document.getFieldValue(parentField + resultsField, double[].class), createdInstance.getInference(), 1e-10); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java index 7ad647075f523..893f7474c3e6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsActionTests.java @@ -28,7 +28,13 @@ public class UpdateSecuritySettingsActionTests extends ESTestCase { public void testValidateSettingsEmpty() { - var req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap() + ); var ex = req.validate(); assertThat(ex, notNullValue()); assertThat(ex.getMessage(), containsString("No settings given to update")); @@ -40,17 +46,41 @@ public void testAllowedSettingsOk() { for (String allowedSetting : ALLOWED_SETTING_KEYS) { Map<String, Object> allowedSettingMap = Map.of(allowedSetting, randomAlphaOfLength(5)); allAllowedSettingsMap.put(allowedSetting, randomAlphaOfLength(5)); - var req = new UpdateSecuritySettingsAction.Request(allowedSettingMap, Collections.emptyMap(), Collections.emptyMap()); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + allowedSettingMap, + Collections.emptyMap(), + Collections.emptyMap() + ); assertThat(req.validate(), nullValue()); - req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), allowedSettingMap, Collections.emptyMap()); + req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + allowedSettingMap, + Collections.emptyMap() + ); assertThat(req.validate(), nullValue()); - req = new UpdateSecuritySettingsAction.Request(Collections.emptyMap(), Collections.emptyMap(), allowedSettingMap); + req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + Collections.emptyMap(), + Collections.emptyMap(), + allowedSettingMap + ); assertThat(req.validate(), nullValue()); } - var req = new UpdateSecuritySettingsAction.Request(allAllowedSettingsMap, allAllowedSettingsMap, allAllowedSettingsMap); + var req = new 
UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + allAllowedSettingsMap, + allAllowedSettingsMap, + allAllowedSettingsMap + ); assertThat(req.validate(), nullValue()); } @@ -63,7 +93,13 @@ public void testDisallowedSettingsFailsValidation() { Map.of(randomFrom(ALLOWED_SETTING_KEYS), randomAlphaOfLength(5)) ); { - var req = new UpdateSecuritySettingsAction.Request(validOrEmptySettingMap, disallowedSettingMap, validOrEmptySettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + validOrEmptySettingMap, + disallowedSettingMap, + validOrEmptySettingMap + ); List<String> errors = req.validate().validationErrors(); assertThat(errors, hasSize(1)); for (String errorMsg : errors) { @@ -81,7 +117,13 @@ public void testDisallowedSettingsFailsValidation() { } { - var req = new UpdateSecuritySettingsAction.Request(disallowedSettingMap, validOrEmptySettingMap, disallowedSettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + disallowedSettingMap, + validOrEmptySettingMap, + disallowedSettingMap + ); List<String> errors = req.validate().validationErrors(); assertThat(errors, hasSize(2)); for (String errorMsg : errors) { @@ -101,7 +143,13 @@ public void testDisallowedSettingsFailsValidation() { } { - var req = new UpdateSecuritySettingsAction.Request(disallowedSettingMap, disallowedSettingMap, disallowedSettingMap); + var req = new UpdateSecuritySettingsAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + disallowedSettingMap, + disallowedSettingMap, + disallowedSettingMap + ); List<String> errors = req.validate().validationErrors(); assertThat(errors, hasSize(3)); for (String errorMsg : errors) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java index 1c4592c331080..625feca39cdb5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/SubjectTests.java @@ -46,6 +46,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.CROSS_CLUSTER_ACCESS_REALM_TYPE; import static org.elasticsearch.xpack.core.security.authc.Subject.FLEET_SERVER_ROLE_DESCRIPTOR_BYTES_V_7_14; import static org.elasticsearch.xpack.core.security.authz.store.RoleReference.CrossClusterAccessRoleReference; +import static org.elasticsearch.xpack.core.security.authz.store.RoleReference.CrossClusterApiKeyRoleReference; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -182,7 +183,7 @@ public void testBuildRoleReferenceForCrossClusterApiKey() { authMetadata ); - final ApiKeyRoleReference roleReference = subject.buildRoleReferenceForCrossClusterApiKey(); + final CrossClusterApiKeyRoleReference roleReference = subject.buildRoleReferenceForCrossClusterApiKey(); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } @@ -233,26 +234,29 @@ public void testGetRoleReferencesForCrossClusterAccess() { contains( isA(CrossClusterAccessRoleReference.class), isA(CrossClusterAccessRoleReference.class), - isA(ApiKeyRoleReference.class) + isA(CrossClusterApiKeyRoleReference.class) ) );
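+ // The final reference above is the cross-cluster API key's own (assigned) role descriptors; its cache key is presumably derived from a digest of the descriptor bytes (RoleReferenceTests further below asserts names contain "apikey:" + sha256(roleDescriptorsBytes) and source is "apikey_" + ApiKeyRoleType.ASSIGNED).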
expectCrossClusterAccessReferenceAtIndex(0, roleReferences, crossClusterAccessSubjectInfo); expectCrossClusterAccessReferenceAtIndex(1, roleReferences, crossClusterAccessSubjectInfo); - final ApiKeyRoleReference roleReference = (ApiKeyRoleReference) roleReferences.get(2); + final CrossClusterApiKeyRoleReference roleReference = (CrossClusterApiKeyRoleReference) roleReferences.get(2); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } else { if (isInternalUser) { - assertThat(roleReferences, contains(isA(FixedRoleReference.class), isA(ApiKeyRoleReference.class))); + assertThat(roleReferences, contains(isA(FixedRoleReference.class), isA(CrossClusterApiKeyRoleReference.class))); expectFixedReferenceAtIndex(0, roleReferences); } else { - assertThat(roleReferences, contains(isA(CrossClusterAccessRoleReference.class), isA(ApiKeyRoleReference.class))); + assertThat( + roleReferences, + contains(isA(CrossClusterAccessRoleReference.class), isA(CrossClusterApiKeyRoleReference.class)) + ); expectCrossClusterAccessReferenceAtIndex(0, roleReferences, crossClusterAccessSubjectInfo); } - final ApiKeyRoleReference roleReference = (ApiKeyRoleReference) roleReferences.get(1); + final CrossClusterApiKeyRoleReference roleReference = (CrossClusterApiKeyRoleReference) roleReferences.get(1); assertThat(roleReference.getApiKeyId(), equalTo(apiKeyId)); assertThat(roleReference.getRoleDescriptorsBytes(), equalTo(authMetadata.get(API_KEY_ROLE_DESCRIPTORS_KEY))); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index ad73944f4c64d..1ade22179ab59 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -285,7 +285,7 @@ public void testReadSecurityPrivilege() { ActionTypes.QUERY_USER_ACTION.name(), HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME, - GetSecuritySettingsAction.NAME + GetSecuritySettingsAction.INSTANCE.name() ); verifyClusterActionAllowed( ClusterPrivilegeResolver.READ_SECURITY, @@ -321,7 +321,7 @@ public void testReadSecurityPrivilege() { ActivateProfileAction.NAME, SetProfileEnabledAction.NAME, UpdateProfileDataAction.NAME, - UpdateSecuritySettingsAction.NAME + UpdateSecuritySettingsAction.INSTANCE.name() ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java index 74c8e6addf243..bc94cabab187d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java @@ -72,6 +72,22 @@ public void testApiKeyRoleReference() { assertThat(roleKey.getSource(), equalTo("apikey_" + apiKeyRoleType)); } + public void testCrossClusterApiKeyRoleReference() { + final String apiKeyId = randomAlphaOfLength(20); + final BytesArray roleDescriptorsBytes = new BytesArray(randomAlphaOfLength(50)); + final RoleReference.CrossClusterApiKeyRoleReference apiKeyRoleReference = new 
RoleReference.CrossClusterApiKeyRoleReference( + apiKeyId, + roleDescriptorsBytes + ); + + final RoleKey roleKey = apiKeyRoleReference.id(); + assertThat( + roleKey.getNames(), + hasItem("apikey:" + MessageDigests.toHexString(MessageDigests.digest(roleDescriptorsBytes, MessageDigests.sha256()))) + ); + assertThat(roleKey.getSource(), equalTo("apikey_" + RoleReference.ApiKeyRoleType.ASSIGNED)); + } + public void testCrossClusterAccessRoleReference() { final var roleDescriptorsBytes = new CrossClusterAccessSubjectInfo.RoleDescriptorsBytes(new BytesArray(randomAlphaOfLength(50))); final var crossClusterAccessRoleReference = new RoleReference.CrossClusterAccessRoleReference("user", roleDescriptorsBytes); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java new file mode 100644 index 0000000000000..b50895659ddfd --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsActionNodesStatsResponseTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsResponse; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsResponse; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; + +import java.util.List; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class GetTransformNodeStatsActionNodesStatsResponseTests extends ESTestCase { + + private static final ClusterName CLUSTER_NAME = new ClusterName("my-cluster"); + + public void testEmptyResponse() { + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(), List.of()); + assertThat(nodesStatsResponse.getNodes(), is(empty())); + assertThat(nodesStatsResponse.failures(), is(empty())); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(0))); + } + + public void testResponse() { + var nodeA = new NodeStatsResponse(createNode("node-A"), new TransformSchedulerStats(7, null)); + var nodeB = new NodeStatsResponse(createNode("node-B"), new TransformSchedulerStats(0, null)); + var nodeC = new NodeStatsResponse(createNode("node-C"), new TransformSchedulerStats(4, null)); + + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(nodeA, nodeB, nodeC), List.of()); + assertThat(nodesStatsResponse.getNodes(), containsInAnyOrder(nodeA, nodeB, nodeC)); + assertThat(nodesStatsResponse.failures(), is(empty())); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(11))); + } 
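+    // The expected total above (11) is the per-node sum 7 + 0 + 4. A minimal sketch of the presumed aggregation,
+    // assuming the response exposes each node's TransformSchedulerStats (accessor names here are illustrative
+    // assumptions, not the exact production API):
+    //   getNodes().stream().mapToInt(node -> node.getStats().registeredTransformCount()).sum()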
+ + public void testResponseWithFailure() { + var nodeA = new NodeStatsResponse(createNode("node-A"), new TransformSchedulerStats(7, null)); + var nodeB = new NodeStatsResponse(createNode("node-B"), new TransformSchedulerStats(0, null)); + var nodeC = new FailedNodeException("node-C", "node C failed", null); + + var nodesStatsResponse = new NodesStatsResponse(CLUSTER_NAME, List.of(nodeA, nodeB), List.of(nodeC)); + assertThat(nodesStatsResponse.getNodes(), containsInAnyOrder(nodeA, nodeB)); + assertThat(nodesStatsResponse.failures(), contains(nodeC)); + assertThat(nodesStatsResponse.getTotalRegisteredTransformCount(), is(equalTo(7))); + } + + private static DiscoveryNode createNode(String name) { + return DiscoveryNodeUtils.builder(UUIDs.randomBase64UUID(random())).name(name).build(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/utils/FloatConversionUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/utils/FloatConversionUtilsTests.java new file mode 100644 index 0000000000000..02a4234b007d3 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/utils/FloatConversionUtilsTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.utils; + +import org.elasticsearch.test.ESTestCase; + +public class FloatConversionUtilsTests extends ESTestCase { + + public void testFloatArrayOf() { + double[] doublesArray = { 1.0, 2.0, 3.0 }; + float[] floatArray = FloatConversionUtils.floatArrayOf(doublesArray); + assertEquals(1.0, floatArray[0], 0.0); + assertEquals(2.0, floatArray[1], 0.0); + assertEquals(3.0, floatArray[2], 0.0); + } + +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/170_connector_update_features.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/170_connector_update_features.yml new file mode 100644 index 0000000000000..0964e4f50ebde --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/170_connector_update_features.yml @@ -0,0 +1,108 @@ +setup: + - requires: + cluster_features: ["gte_v8.15.0"] + reason: Introduced in 8.15.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Features": + - do: + connector.update_features: + connector_id: test-connector + body: + features: + document_level_security: { enabled: true } + native_connector_api_keys: { enabled: true } + incremental_sync: { enabled: false } + sync_rules: + basic: { enabled: true } + advanced: { enabled: false } + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { features.document_level_security.enabled: true } + - match: { features.native_connector_api_keys.enabled: true } + - match: { features.incremental_sync.enabled: false } + - match: { features.sync_rules.basic.enabled: true } + - match: { features.sync_rules.advanced.enabled: false } + +--- +"Update Connector Features - Partial Update": + - do: + connector.update_features: + connector_id: test-connector + body: + features: + 
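+          # Feature updates presumably merge into the stored document rather than replace it: each request in this
+          # test touches only the submitted key, and earlier values are asserted unchanged further down.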
document_level_security: { enabled: true } + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { features.document_level_security.enabled: true } + + + - do: + connector.update_features: + connector_id: test-connector + body: + features: + native_connector_api_keys: { enabled: true } + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + # Assert that existing feature remains unchanged + - match: { features.document_level_security.enabled: true } + - match: { features.native_connector_api_keys.enabled: true } + +--- +"Update Connector Features - 404 when connector doesn't exist": + - do: + catch: "missing" + connector.update_features: + connector_id: test-non-existent-connector + body: + features: + native_connector_api_keys: { enabled: true } + +--- +"Update Connector Features - 400 status code when connector_id is empty": + - do: + catch: "bad_request" + connector.update_features: + connector_id: "" + body: + features: + native_connector_api_keys: { enabled: true } + +--- +"Update Connector Features - 400 status code when payload unknown": + - do: + catch: "bad_request" + connector.update_features: + connector_id: test-connector + body: + featuresss: + not_a_feature: 12423 diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml index cafdf0791de2f..731e4a6a30f31 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml @@ -75,6 +75,33 @@ setup: - match: { last_indexed_document_count: 42 } +--- +"Update Connector Last Sync Stats - Supports sync_cursor updates": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_deleted_document_count: 123 + + - match: { result: updated } + + - do: + connector.last_sync: + connector_id: test-connector + body: + sync_cursor: { pointer: 42 } + + - match: { result: updated } + + + - do: + connector.get: + connector_id: test-connector + + - match: { sync_cursor: { pointer: 42 } } + - match: { last_deleted_document_count: 123 } + + --- "Update Connector Last Sync Stats - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index bc3da1a82fba4..871bf7fb122b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFeaturesAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import 
org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorIndexNameAction; @@ -78,6 +79,7 @@ import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFeaturesAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorIndexNameAction; @@ -93,6 +95,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFeaturesAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorIndexNameAction; @@ -267,6 +270,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorApiKeyIdAction.INSTANCE, TransportUpdateConnectorApiKeyIdAction.class), new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), + new ActionHandler<>(UpdateConnectorFeaturesAction.INSTANCE, TransportUpdateConnectorFeaturesAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorActiveFilteringAction.INSTANCE, TransportUpdateConnectorActiveFilteringAction.class), new ActionHandler<>( @@ -368,6 +372,7 @@ public List getRestHandlers( new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), new RestUpdateConnectorActiveFilteringAction(), + new RestUpdateConnectorFeaturesAction(), new RestUpdateConnectorFilteringValidationAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorIndexNameAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index e9447149c7e6c..a9c488b024d49 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -213,7 +213,7 @@ public Connector(StreamInput in) throws IOException { static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); public static final ParseField DESCRIPTION_FIELD = new ParseField("description"); public static final ParseField ERROR_FIELD = new ParseField("error"); - static final ParseField FEATURES_FIELD = new 
ParseField("features"); + public static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); public static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); @@ -224,7 +224,7 @@ public Connector(StreamInput in) throws IOException { public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); public static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); public static final ParseField STATUS_FIELD = new ParseField("status"); - static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); + public static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java index 1b2e7209e41e5..0b9a72f06ad53 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java @@ -102,6 +102,22 @@ public static ConnectorFeatures fromXContentBytes(BytesReference source, XConten } } + public FeatureEnabled getDocumentLevelSecurityEnabled() { + return documentLevelSecurityEnabled; + } + + public FeatureEnabled getIncrementalSyncEnabled() { + return incrementalSyncEnabled; + } + + public FeatureEnabled getNativeConnectorAPIKeysEnabled() { + return nativeConnectorAPIKeysEnabled; + } + + public SyncRulesFeatures getSyncRulesFeatures() { + return syncRulesFeatures; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 50e2633bb8c76..e5314a20bdccf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -549,6 +549,33 @@ public void updateConnectorFiltering(String connectorId, List listener) { + try { + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(Map.of(Connector.FEATURES_FIELD.getPreferredName(), features)) + ); + client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + l.onResponse(updateResponse); + })); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the draft filtering in a given {@link Connector}. 
* diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFeaturesAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFeaturesAction.java new file mode 100644 index 0000000000000..48bf87b114548 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFeaturesAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateConnectorFeaturesAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_features_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_features")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorFeaturesAction.Request request = UpdateConnectorFeaturesAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorFeaturesAction.INSTANCE, + request, + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFeaturesAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFeaturesAction.java new file mode 100644 index 0000000000000..c86ddf902519f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFeaturesAction.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorFeaturesAction extends HandledTransportAction< + UpdateConnectorFeaturesAction.Request, + ConnectorUpdateActionResponse> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorFeaturesAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + UpdateConnectorFeaturesAction.NAME, + transportService, + actionFilters, + UpdateConnectorFeaturesAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorFeaturesAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorFeatures( + request.getConnectorId(), + request.getFeatures(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesAction.java new file mode 100644 index 0000000000000..c1f62c0efe6e8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesAction.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFeatures; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorFeaturesAction { + + public static final String NAME = "indices:data/write/xpack/connector/update_features"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private UpdateConnectorFeaturesAction() {/* no instances */} + + public static class Request extends ConnectorActionRequest implements ToXContentObject { + + private final String connectorId; + + private final ConnectorFeatures features; + + public Request(String connectorId, ConnectorFeatures features) { + this.connectorId = connectorId; + this.features = features; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.features = in.readOptionalWriteable(ConnectorFeatures::new); + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorFeatures getFeatures() { + return features; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_features_request", + false, + ((args, connectorId) -> new UpdateConnectorFeaturesAction.Request(connectorId, (ConnectorFeatures) args[0])) + ); + + static { + PARSER.declareObject(optionalConstructorArg(), (p, c) -> ConnectorFeatures.fromXContent(p), Connector.FEATURES_FIELD); + } + + public static UpdateConnectorFeaturesAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorFeaturesAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorFeaturesAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.FEATURES_FIELD.getPreferredName(), features); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalWriteable(features); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(features, request.features); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, features); + } + + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java index 4823803d94030..1628a493cbec5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContentObject; @@ -22,6 +23,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorSyncInfo; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorUtils; @@ -45,16 +47,20 @@ public static class Request extends ConnectorActionRequest implements ToXContent private final String connectorId; private final ConnectorSyncInfo syncInfo; + @Nullable + private final Object syncCursor; - public Request(String connectorId, ConnectorSyncInfo syncInfo) { + private Request(String connectorId, ConnectorSyncInfo syncInfo, Object syncCursor) { this.connectorId = connectorId; this.syncInfo = syncInfo; + this.syncCursor = syncCursor; } public Request(StreamInput in) throws IOException { super(in); this.connectorId = in.readString(); this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); + this.syncCursor = in.readGenericValue(); } public String getConnectorId() { @@ -65,6 +71,10 @@ public ConnectorSyncInfo getSyncInfo() { return syncInfo; } + public Object getSyncCursor() { + return syncCursor; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -79,20 +89,22 @@ public ActionRequestValidationException validate() { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("connector_update_last_sync_stats_request", false, ((args, connectorId) -> { int i = 0; - return new UpdateConnectorLastSyncStatsAction.Request( - connectorId, - new 
ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) - .setLastAccessControlSyncScheduledAt((Instant) args[i++]) - .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastDeletedDocumentCount((Long) args[i++]) - .setLastIncrementalSyncScheduledAt((Instant) args[i++]) - .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSyncError((String) args[i++]) - .setLastSyncScheduledAt((Instant) args[i++]) - .setLastSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastSynced((Instant) args[i++]) - .build() - ); + return new Builder().setConnectorId(connectorId) + .setSyncInfo( + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + .setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) args[i++]) + .build() + ) + .setSyncCursor(args[i]) + .build(); })); static { @@ -142,6 +154,7 @@ public ActionRequestValidationException validate() { ConnectorSyncInfo.LAST_SYNCED_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); + PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> p.map(), null, Connector.SYNC_CURSOR_FIELD); } public static UpdateConnectorLastSyncStatsAction.Request fromXContentBytes( @@ -166,6 +179,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); { syncInfo.toXContent(builder, params); + if (syncCursor != null) { + builder.field(Connector.SYNC_CURSOR_FIELD.getPreferredName(), syncCursor); + } } builder.endObject(); return builder; @@ -176,6 +192,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(connectorId); out.writeOptionalWriteable(syncInfo); + out.writeGenericValue(syncCursor); } @Override @@ -183,12 +200,41 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(connectorId, request.connectorId) && Objects.equals(syncInfo, request.syncInfo); + return Objects.equals(connectorId, request.connectorId) + && Objects.equals(syncInfo, request.syncInfo) + && Objects.equals(syncCursor, request.syncCursor); } @Override public int hashCode() { - return Objects.hash(connectorId, syncInfo); + return Objects.hash(connectorId, syncInfo, syncCursor); + } + + public static class Builder { + + private String connectorId; + private ConnectorSyncInfo syncInfo; + private Object syncCursor; + + public Builder setConnectorId(String connectorId) { + this.connectorId = connectorId; + return this; + } + + public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { + this.syncInfo = syncInfo; + return this; + } + + public Builder setSyncCursor(Object syncCursor) { + this.syncCursor = syncCursor; + return this; + } + + public Request build() { + return new Request(connectorId, syncInfo, syncCursor); + } } + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 045cb725e477c..698c061d1bd6c 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.MockScriptEngine; @@ -56,7 +57,9 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomConnectorFeatures; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.randomConnectorFeatureEnabled; import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.registerSimplifiedConnectorIndexTemplates; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; @@ -240,6 +243,51 @@ public void testUpdateConnectorPipeline() throws Exception { assertThat(updatedPipeline, equalTo(indexedConnector.getPipeline())); } + public void testUpdateConnectorFeatures() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorFeatures newFeatures = getRandomConnectorFeatures(); + + DocWriteResponse updateResponse = awaitUpdateConnectorFeatures(connectorId, newFeatures); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(newFeatures, equalTo(indexedConnector.getFeatures())); + + } + + public void testUpdateConnectorFeatures_partialUpdate() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorFeatures features = getRandomConnectorFeatures(); + + awaitUpdateConnectorFeatures(connectorId, features); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(features, equalTo(indexedConnector.getFeatures())); + + // Partial update of DLS feature + ConnectorFeatures dlsFeature = new ConnectorFeatures.Builder().setDocumentLevelSecurityEnabled(randomConnectorFeatureEnabled()) + .build(); + awaitUpdateConnectorFeatures(connectorId, dlsFeature); + indexedConnector = awaitGetConnector(connectorId); + + // Assert that partial update was applied + assertThat(dlsFeature.getDocumentLevelSecurityEnabled(), equalTo(indexedConnector.getFeatures().getDocumentLevelSecurityEnabled())); + + // Assert other features are unchanged + assertThat(features.getSyncRulesFeatures(), equalTo(indexedConnector.getFeatures().getSyncRulesFeatures())); + assertThat(features.getNativeConnectorAPIKeysEnabled(), equalTo(indexedConnector.getFeatures().getNativeConnectorAPIKeysEnabled())); + assertThat(features.getIncrementalSyncEnabled(), 
equalTo(indexedConnector.getFeatures().getIncrementalSyncEnabled())); + } + public void testUpdateConnectorFiltering() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -464,7 +512,9 @@ public void testUpdateConnectorLastSyncStats() throws Exception { ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(syncStats).build(); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -486,7 +536,9 @@ public void testUpdateConnectorLastSyncStats_withPartialUpdate() throws Exceptio .setLastDeletedDocumentCount(randomLong()) .build(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(syncStats).build(); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -500,7 +552,9 @@ public void testUpdateConnectorLastSyncStats_withPartialUpdate() throws Exceptio ConnectorSyncInfo nextSyncStats = new ConnectorSyncInfo.Builder().setLastIndexedDocumentCount(randomLong()).build(); - lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, nextSyncStats); + lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId(connectorId) + .setSyncInfo(nextSyncStats) + .build(); updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); @@ -516,6 +570,27 @@ public void testUpdateConnectorLastSyncStats_withPartialUpdate() throws Exceptio } + public void testUpdateConnectorLastSyncStats_syncCursor() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + Map syncCursor = randomMap(2, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))); + + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId( + connectorId + ).setSyncInfo(new ConnectorSyncInfo.Builder().build()).setSyncCursor(syncCursor).build(); + + DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + // Check sync_cursor got updated + assertThat(syncCursor, equalTo(indexedConnector.getSyncCursor())); + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -890,6 +965,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorFeatures(String connectorId, ConnectorFeatures features) throws 
Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorFeatures(connectorId, features, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for update features request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update features request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorFiltering(String connectorId, List filtering) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 230de44a8f6c5..f052ef79d82fb 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -371,7 +371,7 @@ public static ConnectorSearchResult getRandomConnectorSearchResult() { .build(); } - private static ConnectorFeatures.FeatureEnabled randomConnectorFeatureEnabled() { + public static ConnectorFeatures.FeatureEnabled randomConnectorFeatureEnabled() { return new ConnectorFeatures.FeatureEnabled(randomBoolean()); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..9a191dba2e525 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFeaturesActionRequestBWCSerializingTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
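The awaitUpdateConnectorFeatures helper added above follows the test suite's usual latch-and-reference idiom for blocking on an asynchronous callback. A stripped-down, self-contained sketch of that idiom is below; the Listener interface is a toy stand-in for Elasticsearch's ActionListener, not the real type.

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Consumer;

    // Toy stand-in for the ActionListener callback used by the service calls.
    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    final class AsyncAwait {
        // Blocks until the async call completes, mirroring the awaitUpdateConnector* helpers.
        static <T> T await(Consumer<Listener<T>> asyncCall, long timeoutSeconds) throws Exception {
            CountDownLatch latch = new CountDownLatch(1);
            AtomicReference<T> resp = new AtomicReference<>();
            AtomicReference<Exception> exc = new AtomicReference<>();
            asyncCall.accept(new Listener<T>() {
                @Override public void onResponse(T response) { resp.set(response); latch.countDown(); }
                @Override public void onFailure(Exception e) { exc.set(e); latch.countDown(); }
            });
            if (latch.await(timeoutSeconds, TimeUnit.SECONDS) == false) {
                throw new AssertionError("timed out waiting for async response");
            }
            if (exc.get() != null) throw exc.get();
            return resp.get();
        }

        public static void main(String[] args) throws Exception {
            // Usage: the callback completes on a background thread; await() blocks the test thread.
            String out = await(l -> new Thread(() -> l.onResponse("ok")).start(), 5);
            System.out.println(out); // prints "ok"
        }
    }

The AtomicReference pair rather than a single result field keeps success and failure channels separate, so a failure can be rethrown on the test thread with its original stack trace.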
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorFeaturesActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorFeaturesAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorFeaturesAction.Request::new; + } + + @Override + protected UpdateConnectorFeaturesAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorFeaturesAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorFeatures()); + } + + @Override + protected UpdateConnectorFeaturesAction.Request mutateInstance(UpdateConnectorFeaturesAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorFeaturesAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorFeaturesAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorFeaturesAction.Request mutateInstanceForVersion( + UpdateConnectorFeaturesAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java index 0728a7b328eb4..b324a43b46b81 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -28,7 +29,10 @@ protected Writeable.Reader instanceR @Override protected UpdateConnectorLastSyncStatsAction.Request createTestInstance() { this.connectorId = randomUUID(); - return new UpdateConnectorLastSyncStatsAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorSyncInfo()); + return new UpdateConnectorLastSyncStatsAction.Request.Builder().setConnectorId(connectorId) + .setSyncInfo(ConnectorTestUtils.getRandomConnectorSyncInfo()) + .setSyncCursor(randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4)))) + .build(); } @Override diff --git a/x-pack/plugin/esql-core/build.gradle b/x-pack/plugin/esql-core/build.gradle index ed9f5066d10e7..796ec1d137155 100644 --- a/x-pack/plugin/esql-core/build.gradle +++ b/x-pack/plugin/esql-core/build.gradle @@ -1,5 +1,6 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' +apply plugin: 
'elasticsearch.publish' esplugin { name 'x-pack-esql-core' diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java index 2bc150db64950..ce188511fe7bc 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java @@ -8,24 +8,10 @@ package org.elasticsearch.xpack.esql.core.analyzer; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; -import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.core.rule.Rule; -import org.elasticsearch.xpack.esql.core.session.Configuration; -import org.elasticsearch.xpack.esql.core.type.DataTypes; -import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; -import org.elasticsearch.xpack.esql.core.type.UnsupportedEsField; import java.util.ArrayList; import java.util.Collection; @@ -34,48 +20,10 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static java.util.Arrays.asList; import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; public final class AnalyzerRules { - public static class AddMissingEqualsToBoolField extends AnalyzerRule { - - @Override - protected LogicalPlan rule(Filter filter) { - if (filter.resolved() == false) { - return filter; - } - // check the condition itself - Expression condition = replaceRawBoolFieldWithEquals(filter.condition()); - // otherwise look for binary logic - if (condition == filter.condition()) { - condition = condition.transformUp( - BinaryLogic.class, - b -> b.replaceChildren(asList(replaceRawBoolFieldWithEquals(b.left()), replaceRawBoolFieldWithEquals(b.right()))) - ); - } - - if (condition != filter.condition()) { - filter = filter.with(condition); - } - return filter; - } - - private static Expression replaceRawBoolFieldWithEquals(Expression e) { - if (e instanceof FieldAttribute && e.dataType() == BOOLEAN) { - e = new Equals(e.source(), e, Literal.of(e, Boolean.TRUE)); - } - return e; - } - - @Override - protected boolean skipResolved() { - return false; - } - } - public abstract static class AnalyzerRule extends Rule { // transformUp (post-order) - that is first children and then the node @@ -123,24 +71,6 @@ protected LogicalPlan rule(LogicalPlan plan) { protected abstract LogicalPlan doRule(LogicalPlan plan); } - public static Function resolveFunction(UnresolvedFunction uf, 
Configuration configuration, FunctionRegistry functionRegistry) { - Function f = null; - if (uf.analyzed()) { - f = uf; - } else if (uf.childrenResolved() == false) { - f = uf; - } else { - String functionName = functionRegistry.resolveAlias(uf.name()); - if (functionRegistry.functionExists(functionName) == false) { - f = uf.missing(functionName, functionRegistry.listFunctions()); - } else { - FunctionDefinition def = functionRegistry.resolveFunction(functionName); - f = uf.buildResolved(configuration, def); - } - } - return f; - } - public static List maybeResolveAgainstList( UnresolvedAttribute u, Collection attrList, @@ -218,42 +148,4 @@ public static List maybeResolveAgainstList( ) ); } - - public static Attribute handleSpecialFields(UnresolvedAttribute u, Attribute named, boolean allowCompound) { - // if it's a object/compound type, keep it unresolved with a nice error message - if (named instanceof FieldAttribute fa) { - - // incompatible mappings - if (fa.field() instanceof InvalidMappedField imf) { - named = u.withUnresolvedMessage("Cannot use field [" + fa.name() + "] due to ambiguities being " + imf.errorMessage()); - } - // unsupported types - else if (DataTypes.isUnsupported(fa.dataType())) { - UnsupportedEsField unsupportedField = (UnsupportedEsField) fa.field(); - if (unsupportedField.hasInherited()) { - named = u.withUnresolvedMessage( - "Cannot use field [" - + fa.name() - + "] with unsupported type [" - + unsupportedField.getOriginalType() - + "] in hierarchy (field [" - + unsupportedField.getInherited() - + "])" - ); - } else { - named = u.withUnresolvedMessage( - "Cannot use field [" + fa.name() + "] with unsupported type [" + unsupportedField.getOriginalType() + "]" - ); - } - } - // compound fields - else if (allowCompound == false && DataTypes.isPrimitive(fa.dataType()) == false) { - named = u.withUnresolvedMessage( - "Cannot use field [" + fa.name() + "] type [" + fa.dataType().typeName() + "] only its subfields" - ); - } - } - // make sure to copy the resolved attribute with the proper location - return named.withLocation(u.source()); - } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzer.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzer.java deleted file mode 100644 index bf40370b5fe4f..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzer.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.analyzer; - -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnresolvedRelation; - -import java.util.ArrayList; -import java.util.List; - -import static java.util.Collections.emptyList; - -// Since the pre-analyzer only inspect (and does NOT transform) the tree -// it is not built as a rule executor. -// Further more it applies 'the rules' only once and needs to return some -// state back. 
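For context on what is being deleted here: the PreAnalyzer only inspected the plan, collecting table references in a single bottom-up pass and never transforming the tree, which is why it was not built as a rule executor. A toy sketch of that forEachUp collection pattern follows; the Node class is a stand-in for LogicalPlan, not the real class.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    // Toy plan node; stand-in for LogicalPlan.
    class Node {
        final String name;
        final List<Node> children;
        Node(String name, List<Node> children) { this.name = name; this.children = children; }

        // Post-order walk, like LogicalPlan.forEachUp: children first, then this node.
        void forEachUp(Consumer<Node> action) {
            for (Node c : children) c.forEachUp(action);
            action.accept(this);
        }
    }

    class PreAnalyzeSketch {
        public static void main(String[] args) {
            Node rel = new Node("UnresolvedRelation[logs-*]", List.of());
            Node filter = new Node("Filter", List.of(rel));
            Node plan = new Node("Limit", List.of(filter));

            List<String> indices = new ArrayList<>();
            plan.forEachUp(n -> {
                if (n.name.startsWith("UnresolvedRelation")) indices.add(n.name);
            });
            System.out.println(indices); // [UnresolvedRelation[logs-*]]
        }
    }
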
-public class PreAnalyzer { - - public static class PreAnalysis { - public static final PreAnalysis EMPTY = new PreAnalysis(emptyList()); - - public final List indices; - - public PreAnalysis(List indices) { - this.indices = indices; - } - } - - public PreAnalysis preAnalyze(LogicalPlan plan) { - if (plan.analyzed()) { - return PreAnalysis.EMPTY; - } - - return doPreAnalyze(plan); - } - - private static PreAnalysis doPreAnalyze(LogicalPlan plan) { - List indices = new ArrayList<>(); - - plan.forEachUp(UnresolvedRelation.class, p -> indices.add(new TableInfo(p.table(), p.frozen()))); - - // mark plan as preAnalyzed (if it were marked, there would be no analysis) - plan.forEachUp(LogicalPlan::setPreAnalyzed); - - return new PreAnalysis(indices); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java index 44ac786ba32fe..36ce187d8600c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java @@ -15,7 +15,7 @@ import java.util.Set; import static org.elasticsearch.xpack.esql.core.common.Failure.fail; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; public final class VerifierChecks { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java index deb5cae1172f4..94bac95b91501 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/AsyncTaskManagementService.java @@ -43,7 +43,7 @@ import static org.elasticsearch.core.Strings.format; /** - * Service for managing EQL requests + * Service for managing ESQL requests */ public class AsyncTaskManagementService< Request extends TaskAwareRequest, @@ -84,7 +84,7 @@ T createTask( } /** - * Wrapper for EqlSearchRequest that creates an async version of EqlSearchTask + * Wrapper for EsqlQueryRequest that creates an async version of EsqlQueryTask */ private class AsyncRequestWrapper implements TaskAwareRequest { private final Request request; @@ -278,8 +278,7 @@ private void storeResults(T searchTask, StoredAsyncResponse storedResp ActionListener.wrap( // We should only unregister after the result is saved resp -> { - // TODO: generalize the logging, not just eql - logger.trace(() -> "stored eql search results for [" + searchTask.getExecutionId().getEncoded() + "]"); + logger.trace(() -> "stored ESQL search results for [" + searchTask.getExecutionId().getEncoded() + "]"); taskManager.unregister(searchTask); if (storedResponse.getException() != null) { searchTask.onFailure(storedResponse.getException()); @@ -297,8 +296,7 @@ private void storeResults(T searchTask, StoredAsyncResponse storedResp if (cause instanceof DocumentMissingException == false && cause instanceof VersionConflictEngineException == false) { logger.error( - // TODO: generalize the logging, not just eql - () -> format("failed to store eql search results for [%s]", searchTask.getExecutionId().getEncoded()), + () -> format("failed to store ESQL search 
results for [%s]", searchTask.getExecutionId().getEncoded()), exc ); } @@ -311,7 +309,7 @@ private void storeResults(T searchTask, StoredAsyncResponse storedResp } catch (Exception exc) { taskManager.unregister(searchTask); searchTask.onFailure(exc); - logger.error(() -> "failed to store eql search results for [" + searchTask.getExecutionId().getEncoded() + "]", exc); + logger.error(() -> "failed to store ESQL search results for [" + searchTask.getExecutionId().getEncoded() + "]", exc); } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/execution/search/extractor/AbstractFieldHitExtractor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/execution/search/extractor/AbstractFieldHitExtractor.java index 6a8bd61f89c03..9f7155a78e66f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/execution/search/extractor/AbstractFieldHitExtractor.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/execution/search/extractor/AbstractFieldHitExtractor.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.io.IOException; import java.time.ZoneId; @@ -85,7 +84,7 @@ protected AbstractFieldHitExtractor(StreamInput in) throws IOException { } protected DataType loadTypeFromName(String typeName) { - return DataTypes.fromTypeName(typeName); + return DataType.fromTypeName(typeName); } protected abstract ZoneId readZoneId(StreamInput in) throws IOException; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index 58203c8a0072e..d9f99b6d92318 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -6,11 +6,18 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; +import java.io.IOException; import java.util.List; +import java.util.Objects; import static java.util.Collections.singletonList; @@ -22,7 +29,8 @@ * And in {@code SELECT col AS x} "col" is a named expression that gets renamed to "x" through an alias. 
* */ -public class Alias extends NamedExpression { +public final class Alias extends NamedExpression { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(NamedExpression.class, "Alias", Alias::new); private final Expression child; private final String qualifier; @@ -51,6 +59,32 @@ public Alias(Source source, String name, String qualifier, Expression child, Nam this.qualifier = qualifier; } + public Alias(StreamInput in) throws IOException { + this( + Source.readFrom((StreamInput & PlanStreamInput) in), + in.readString(), + in.readOptionalString(), + ((PlanStreamInput) in).readExpression(), + NameId.readFrom((StreamInput & PlanStreamInput) in), + in.readBoolean() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + Source.EMPTY.writeTo(out); + out.writeString(name()); + out.writeOptionalString(qualifier()); + ((PlanStreamOutput) out).writeExpression(child()); + id().writeTo(out); + out.writeBoolean(synthetic()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Alias::new, name(), qualifier, child, id(), synthetic()); @@ -113,4 +147,18 @@ public String nodeString() { public static Expression unwrap(Expression e) { return e instanceof Alias as ? as.child() : e; } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj) == false) { + return false; + } + Alias other = (Alias) obj; + return Objects.equals(qualifier, other.qualifier); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), qualifier); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 5326825ec1105..e89f39294a28b 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -30,7 +29,7 @@ * is a named expression (an {@code Alias} will be created automatically for it). * The rest are not as they are not part of the projection and thus are not part of the derived table. 
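The new ENTRY constant, stream constructor, and writeTo/getWriteableName pair make Alias a first-class NamedWriteable: the registry name travels with the serialized bytes and deserialization dispatches on it. A self-contained toy version of that dispatch is sketched below, using plain DataInput/DataOutput streams rather than the real StreamInput machinery.

    import java.io.*;
    import java.util.Map;

    // Toy sketch of the NamedWriteable idea: each class registers a name plus a
    // reader, and deserialization dispatches on the name. Simplified types.
    interface ToyWriteable {
        String getWriteableName();
        void writeTo(DataOutputStream out) throws IOException;
    }

    record ToyAlias(String name, String child) implements ToyWriteable {
        static final String ENTRY_NAME = "Alias";
        static ToyAlias readFrom(DataInputStream in) throws IOException {
            return new ToyAlias(in.readUTF(), in.readUTF());
        }
        public String getWriteableName() { return ENTRY_NAME; }
        public void writeTo(DataOutputStream out) throws IOException {
            out.writeUTF(name);
            out.writeUTF(child);
        }
    }

    class ToyRegistry {
        interface Reader { ToyWriteable read(DataInputStream in) throws IOException; }

        // name -> reader, like NamedWriteableRegistry entries
        static final Map<String, Reader> READERS = Map.of(ToyAlias.ENTRY_NAME, ToyAlias::readFrom);

        static byte[] write(ToyWriteable w) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeUTF(w.getWriteableName()); // the name travels with the payload
            w.writeTo(out);
            return bytes.toByteArray();
        }

        static ToyWriteable read(byte[] data) throws IOException {
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
            return READERS.get(in.readUTF()).read(in);
        }

        public static void main(String[] args) throws IOException {
            ToyWriteable roundTripped = read(write(new ToyAlias("x", "col")));
            System.out.println(roundTripped); // ToyAlias[name=x, child=col]
        }
    }
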
*/ -public abstract class Attribute extends NamedExpression implements NamedWriteable { +public abstract class Attribute extends NamedExpression { public static List getNamedWriteables() { // TODO add UnsupportedAttribute when these are moved to the same project return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EmptyAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EmptyAttribute.java index 7a724eaa2be65..5824358e57525 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EmptyAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EmptyAttribute.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.io.IOException; @@ -60,7 +59,7 @@ public boolean resolved() { @Override public DataType dataType() { - return DataTypes.NULL; + return DataType.NULL; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java index 980a0312c29f0..8baffbf887e47 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.core.Tuple; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.ArrayList; import java.util.Collection; @@ -133,7 +133,7 @@ public static String name(Expression e) { } public static boolean isNull(Expression e) { - return e.dataType() == DataTypes.NULL || (e.foldable() && e.fold() == null); + return e.dataType() == DataType.NULL || (e.foldable() && e.fold() == null); } public static List names(Collection e) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index 35fe402035f69..a6e713007a97f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -78,9 +78,9 @@ public FieldAttribute( } @SuppressWarnings("unchecked") - public FieldAttribute(StreamInput in) throws IOException { + public FieldAttribute(StreamInput in) throws IOException { /* - * The funny casting dance with `` and `(S) in` is required + * The funny casting dance with `(StreamInput & PlanStreamInput) in` is required * because we're in esql-core here and the real PlanStreamInput is in * esql-proper. And because NamedWriteableRegistry.Entry needs StreamInput, * not a PlanStreamInput. And we need PlanStreamInput to handle Source @@ -88,14 +88,14 @@ public FieldAttribute(StreamInput in) * of esql-core. 
*/ this( - Source.readFrom((S) in), + Source.readFrom((StreamInput & PlanStreamInput) in), in.readOptionalWriteable(FieldAttribute::new), in.readString(), DataType.readFrom(in), in.readNamedWriteable(EsField.class), in.readOptionalString(), in.readEnum(Nullability.class), - NameId.readFrom((S) in), + NameId.readFrom((StreamInput & PlanStreamInput) in), in.readBoolean() ); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java index ea37629df6085..68780f5b32e9c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.Objects; @@ -19,9 +18,9 @@ */ public class Literal extends LeafExpression { - public static final Literal TRUE = new Literal(Source.EMPTY, Boolean.TRUE, DataTypes.BOOLEAN); - public static final Literal FALSE = new Literal(Source.EMPTY, Boolean.FALSE, DataTypes.BOOLEAN); - public static final Literal NULL = new Literal(Source.EMPTY, null, DataTypes.NULL); + public static final Literal TRUE = new Literal(Source.EMPTY, Boolean.TRUE, DataType.BOOLEAN); + public static final Literal FALSE = new Literal(Source.EMPTY, Boolean.FALSE, DataType.BOOLEAN); + public static final Literal NULL = new Literal(Source.EMPTY, null, DataType.NULL); private final Object value; private final DataType dataType; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 9cbee26f443ba..eac3586cf139d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import java.io.IOException; @@ -35,15 +34,15 @@ public class MetadataAttribute extends TypedAttribute { private static final Map> ATTRIBUTES_MAP = Map.of( "_version", - tuple(DataTypes.LONG, false), // _version field is not searchable + tuple(DataType.LONG, false), // _version field is not searchable "_index", - tuple(DataTypes.KEYWORD, true), + tuple(DataType.KEYWORD, true), IdFieldMapper.NAME, - tuple(DataTypes.KEYWORD, false), // actually searchable, but fielddata access on the _id field is disallowed by default + tuple(DataType.KEYWORD, false), // actually searchable, but fielddata access on the _id field is disallowed by default IgnoredFieldMapper.NAME, - tuple(DataTypes.KEYWORD, true), + tuple(DataType.KEYWORD, true), SourceFieldMapper.NAME, - tuple(DataTypes.SOURCE, false) + tuple(DataType.SOURCE, false) ); private final boolean searchable; @@ -67,9 +66,9 @@ public MetadataAttribute(Source source, String name, DataType dataType, boolean } 
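The rewritten comments around these constructors spell out the (StreamInput & PlanStreamInput) trick. It relies on Java's intersection-type casts: casting to A & B yields a value that satisfies an <S extends A & B> signature, with both interface checks performed at runtime. A minimal demonstration with toy interfaces, not the real stream types:

    interface ByteSource {
        int readByte();
    }

    interface NameIdSource {
        long mapNameId(long id);
    }

    class IntersectionCastDemo {
        // Mirrors Source.readFrom(...): the argument must implement both interfaces.
        static <S extends ByteSource & NameIdSource> long readMappedId(S in) {
            return in.mapNameId(in.readByte());
        }

        static long fromPlainReference(ByteSource in) {
            // `in` is statically only a ByteSource; the intersection cast asserts
            // (and verifies at runtime) that it also implements NameIdSource.
            return readMappedId((ByteSource & NameIdSource) in);
        }

        public static void main(String[] args) {
            class Both implements ByteSource, NameIdSource {
                public int readByte() { return 7; }
                public long mapNameId(long id) { return id + 100; }
            }
            System.out.println(fromPlainReference(new Both())); // prints 107
        }
    }

This keeps esql-core free of a compile-time dependency on the concrete PlanStreamInput implementation while still letting callers that have one pass it through a plain StreamInput-typed parameter.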
@SuppressWarnings("unchecked") - public MetadataAttribute(StreamInput in) throws IOException { + public MetadataAttribute(StreamInput in) throws IOException { /* - * The funny casting dance with `` and `(S) in` is required + * The funny casting dance with `(StreamInput & PlanStreamInput) in` is required * because we're in esql-core here and the real PlanStreamInput is in * esql-proper. And because NamedWriteableRegistry.Entry needs StreamInput, * not a PlanStreamInput. And we need PlanStreamInput to handle Source @@ -77,12 +76,12 @@ public MetadataAttribute(StreamInput i * of esql-core. */ this( - Source.readFrom((S) in), + Source.readFrom((StreamInput & PlanStreamInput) in), in.readString(), DataType.readFrom(in), in.readOptionalString(), in.readEnum(Nullability.class), - NameId.readFrom((S) in), + NameId.readFrom((StreamInput & PlanStreamInput) in), in.readBoolean(), in.readBoolean() ); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index 4a3666c8b8aa7..e3e9a60180da7 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -6,8 +6,11 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -15,7 +18,15 @@ * An expression that has a name. Named expressions can be used as a result * (by converting to an attribute). */ -public abstract class NamedExpression extends Expression { +public abstract class NamedExpression extends Expression implements NamedWriteable { + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + for (NamedWriteableRegistry.Entry e : Attribute.getNamedWriteables()) { + entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); + } + entries.add(Alias.ENTRY); + return entries; + } private final String name; private final NameId id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java index 8bac20e9347bc..d9a70787a56ed 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ReferenceAttribute.java @@ -43,9 +43,9 @@ public ReferenceAttribute( } @SuppressWarnings("unchecked") - public ReferenceAttribute(StreamInput in) throws IOException { + public ReferenceAttribute(StreamInput in) throws IOException { /* - * The funny casting dance with `` and `(S) in` is required + * The funny casting dance with `(StreamInput & PlanStreamInput) in` is required * because we're in esql-core here and the real PlanStreamInput is in * esql-proper. And because NamedWriteableRegistry.Entry needs StreamInput, * not a PlanStreamInput. And we need PlanStreamInput to handle Source @@ -53,12 +53,12 @@ public ReferenceAttribute(StreamInput * of esql-core. 
*/ this( - Source.readFrom((S) in), + Source.readFrom((StreamInput & PlanStreamInput) in), in.readString(), DataType.readFrom(in), in.readOptionalString(), in.readEnum(Nullability.class), - NameId.readFrom((S) in), + NameId.readFrom((StreamInput & PlanStreamInput) in), in.readBoolean() ); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index a0b61022c1450..588b0a2af55d3 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression.TypeResolution; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import java.util.Locale; @@ -18,10 +17,10 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.Expressions.name; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; public final class TypeResolutions { @@ -60,7 +59,7 @@ public static TypeResolution isNumeric(Expression e, String operationName, Param } public static TypeResolution isString(Expression e, String operationName, ParamOrdinal paramOrd) { - return isType(e, DataTypes::isString, operationName, paramOrd, "string"); + return isType(e, DataType::isString, operationName, paramOrd, "string"); } public static TypeResolution isIP(Expression e, String operationName, ParamOrdinal paramOrd) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAlias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAlias.java deleted file mode 100644 index a4b0d06f54b83..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedAlias.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
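Much of the remaining churn across these files is mechanical: the static DataTypes utility class appears to have been folded into the DataType enum, so constants and predicates move from DataTypes.X and DataTypes::isString to DataType.X and DataType::isString. The shape of that refactor, on a toy enum:

    // Toy sketch of folding a static helper class into the enum itself.
    enum ToyDataType {
        BOOLEAN, KEYWORD, LONG, NULL;

        // Previously a static method on a separate ToyDataTypes utility class.
        static ToyDataType fromTypeName(String name) {
            return valueOf(name.toUpperCase(java.util.Locale.ROOT));
        }

        static boolean isString(ToyDataType t) {
            return t == KEYWORD;
        }
    }

    class DataTypeDemo {
        public static void main(String[] args) {
            // Call sites now read ToyDataType.X instead of ToyDataTypes.X.
            System.out.println(ToyDataType.fromTypeName("keyword"));       // KEYWORD
            System.out.println(ToyDataType.isString(ToyDataType.KEYWORD)); // true
        }
    }

Keeping the helpers on the enum removes one import per call site and makes the type the single place to look for type predicates.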
- */ -package org.elasticsearch.xpack.esql.core.expression; - -import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; -import java.util.Objects; - -import static java.util.Collections.singletonList; - -public class UnresolvedAlias extends UnresolvedNamedExpression { - - private final Expression child; - - public UnresolvedAlias(Source source, Expression child) { - super(source, singletonList(child)); - this.child = child; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, UnresolvedAlias::new, child); - } - - @Override - public Expression replaceChildren(List newChildren) { - return new UnresolvedAlias(source(), newChildren.get(0)); - } - - public Expression child() { - return child; - } - - @Override - public String unresolvedMessage() { - return "Unknown alias [" + name() + "]"; - } - - @Override - public Nullability nullable() { - throw new UnresolvedException("nullable", this); - } - - @Override - public int hashCode() { - return Objects.hash(child); - } - - @Override - public boolean equals(Object obj) { - /* - * Intentionally not calling the superclass - * equals because it uses id which we always - * mutate when we make a clone. - */ - if (obj == null || obj.getClass() != getClass()) { - return false; - } - return Objects.equals(child, ((UnresolvedAlias) obj).child); - } - - @Override - public String toString() { - return child + " AS ?"; - } - - @Override - public String nodeString() { - return toString(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedStar.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedStar.java index 198016c710ce3..f3b52cfcccf90 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedStar.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/UnresolvedStar.java @@ -6,10 +6,12 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.util.List; import java.util.Objects; @@ -25,6 +27,16 @@ public UnresolvedStar(Source source, UnresolvedAttribute qualifier) { this.qualifier = qualifier; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("doesn't escape the node"); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException("doesn't escape the node"); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, UnresolvedStar::new, qualifier); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/BinaryComparisonCaseInsensitiveFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/BinaryComparisonCaseInsensitiveFunction.java index 920e77d52d0d3..4739fe910b769 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/BinaryComparisonCaseInsensitiveFunction.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/BinaryComparisonCaseInsensitiveFunction.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.Objects; @@ -53,7 +52,7 @@ public Expression right() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/whitelist/InternalQlScriptUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/whitelist/InternalQlScriptUtils.java index 8895a2dc5473c..e361d2465a1c5 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/whitelist/InternalQlScriptUtils.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/whitelist/InternalQlScriptUtils.java @@ -22,9 +22,9 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.type.DataType.fromTypeName; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.convert; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.toUnsignedLong; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.fromTypeName; public class InternalQlScriptUtils { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java index 9d0dd9b161b4d..ee48fd84b8add 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.DateUtils; import java.time.DateTimeException; @@ -104,7 +103,7 @@ public Object fold() { protected boolean areBoundariesInvalid() { Object lowerValue = lower.fold(); Object upperValue = upper.fold(); - if (DataTypes.isDateTime(value.dataType()) || DataTypes.isDateTime(lower.dataType()) || DataTypes.isDateTime(upper.dataType())) { + if (DataType.isDateTime(value.dataType()) || DataType.isDateTime(lower.dataType()) || DataType.isDateTime(upper.dataType())) { try { if (upperValue instanceof String upperString) { upperValue = DateUtils.asDateTime(upperString); @@ -127,7 +126,7 @@ protected boolean areBoundariesInvalid() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java index 06b70b4afa27c..8da858865ed3f 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.List; import java.util.Map; @@ -65,7 +64,7 @@ public Nullability nullable() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java index a7145a7acfe29..39de0e0643c13 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isBoolean; @@ -25,7 +24,7 @@ protected BinaryLogic(Source source, Expression left, Expression right, BinaryLo @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java index 379e67a3e599d..31c63393afaea 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isBoolean; @@ -36,7 +35,7 @@ protected Not replaceChild(Expression newChild) { @Override protected TypeResolution resolveType() { - if (DataTypes.BOOLEAN == field().dataType()) { + if (DataType.BOOLEAN == field().dataType()) { return TypeResolution.TYPE_RESOLVED; } return isBoolean(field(), sourceText(), DEFAULT); @@ -67,7 +66,7 @@ public Expression negate() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } static Expression negate(Expression exp) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java index 5eebbd1294cc5..52375c5db01a1 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; public class IsNotNull extends UnaryScalarFunction implements Negatable { @@ -35,7 +34,7 @@ protected IsNotNull replaceChild(Expression newChild) { @Override public Object fold() { - return field().fold() != null && DataTypes.isNull(field().dataType()) == false; + return field().fold() != null && DataType.isNull(field().dataType()) == false; } @Override @@ -50,7 +49,7 @@ public Nullability nullable() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java index 56c8fa578da39..d52eec9114df6 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; public class IsNull extends UnaryScalarFunction implements Negatable { @@ -35,7 +34,7 @@ protected IsNull replaceChild(Expression newChild) { @Override public Object fold() { - return field().fold() == null || DataTypes.isNull(field().dataType()); + return field().fold() == null || DataType.isNull(field().dataType()); } @Override @@ -50,7 +49,7 @@ public Nullability nullable() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Add.java deleted file mode 100644 index 5b16b478f6519..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Add.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * Addition function ({@code a + b}). 
- */ -public class Add extends DateTimeArithmeticOperation implements BinaryComparisonInversible { - public Add(Source source, Expression left, Expression right) { - super(source, left, right, DefaultBinaryArithmeticOperation.ADD); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Add::new, left(), right()); - } - - @Override - protected Add replaceChildren(Expression left, Expression right) { - return new Add(source(), left, right); - } - - @Override - public Add swapLeftAndRight() { - return new Add(source(), right(), left()); - } - - @Override - public ArithmeticOperationFactory binaryComparisonInverse() { - return Sub::new; - } - - @Override - protected boolean isCommutative() { - return true; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java deleted file mode 100644 index 9e08cea749a34..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; - -abstract class DateTimeArithmeticOperation extends ArithmeticOperation { - - DateTimeArithmeticOperation(Source source, Expression left, Expression right, BinaryArithmeticOperation operation) { - super(source, left, right, operation); - } - - @Override - protected TypeResolution resolveType() { - if (childrenResolved() == false) { - return new TypeResolution("Unresolved children"); - } - - // arithmetic operation can work on numbers in QL - - DataType l = left().dataType(); - DataType r = right().dataType(); - - // 1. both are numbers - if (l.isNumeric() && r.isNumeric()) { - return TypeResolution.TYPE_RESOLVED; - } - - // fall-back to default checks - return super.resolveType(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Div.java deleted file mode 100644 index 5f4c660479579..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Div.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; - -/** - * Division function ({@code a / b}). - */ -public class Div extends ArithmeticOperation implements BinaryComparisonInversible { - - private DataType dataType; - - public Div(Source source, Expression left, Expression right) { - this(source, left, right, null); - } - - public Div(Source source, Expression left, Expression right, DataType dataType) { - super(source, left, right, DefaultBinaryArithmeticOperation.DIV); - this.dataType = dataType; - } - - @Override - protected NodeInfo
info() { - return NodeInfo.create(this, Div::new, left(), right(), dataType); - } - - @Override - protected Div replaceChildren(Expression newLeft, Expression newRight) { - return new Div(source(), newLeft, newRight, dataType); - } - - @Override - public DataType dataType() { - if (dataType == null) { - dataType = DataTypeConverter.commonType(left().dataType(), right().dataType()); - } - return dataType; - } - - @Override - public ArithmeticOperationFactory binaryComparisonInverse() { - return Mul::new; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mod.java deleted file mode 100644 index dea7d4e02e0b3..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mod.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * Modulo - * function ({@code a % b}). - * - * Note this operator is also registered as a function (needed for ODBC/SQL) purposes. - */ -public class Mod extends ArithmeticOperation { - - public Mod(Source source, Expression left, Expression right) { - super(source, left, right, DefaultBinaryArithmeticOperation.MOD); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Mod::new, left(), right()); - } - - @Override - protected Mod replaceChildren(Expression newLeft, Expression newRight) { - return new Mod(source(), newLeft, newRight); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mul.java deleted file mode 100644 index 46cc72766d531..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Mul.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; - -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; - -/** - * Multiplication function ({@code a * b}). 
- */ -public class Mul extends ArithmeticOperation implements BinaryComparisonInversible { - - public Mul(Source source, Expression left, Expression right) { - super(source, left, right, DefaultBinaryArithmeticOperation.MUL); - } - - @Override - protected TypeResolution resolveType() { - if (childrenResolved() == false) { - return new TypeResolution("Unresolved children"); - } - - DataType l = left().dataType(); - DataType r = right().dataType(); - - // 1. both are numbers - if (DataTypes.isNullOrNumeric(l) && DataTypes.isNullOrNumeric(r)) { - return TypeResolution.TYPE_RESOLVED; - } - - return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Mul::new, left(), right()); - } - - @Override - protected Mul replaceChildren(Expression newLeft, Expression newRight) { - return new Mul(source(), newLeft, newRight); - } - - @Override - public Mul swapLeftAndRight() { - return new Mul(source(), right(), left()); - } - - @Override - public ArithmeticOperationFactory binaryComparisonInverse() { - return Div::new; - } - - @Override - protected boolean isCommutative() { - return true; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Sub.java deleted file mode 100644 index 8a345986e5fba..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Sub.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * Subtraction function ({@code a - b}). 
- */ -public class Sub extends DateTimeArithmeticOperation implements BinaryComparisonInversible { - - public Sub(Source source, Expression left, Expression right) { - super(source, left, right, DefaultBinaryArithmeticOperation.SUB); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Sub::new, left(), right()); - } - - @Override - protected Sub replaceChildren(Expression newLeft, Expression newRight) { - return new Sub(source(), newLeft, newRight); - } - - @Override - public ArithmeticOperationFactory binaryComparisonInverse() { - return Add::new; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java index a9c4fd439a1ff..193b77f2344c0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.time.ZoneId; @@ -38,7 +37,7 @@ protected TypeResolution resolveInputType(Expression e, ParamOrdinal paramOrdina @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } public static Integer compare(Object left, Object right) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java index ff188cb30d7d6..21fbfa56b0d98 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import java.time.ZoneId; @@ -71,7 +70,7 @@ public List list() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override @@ -110,7 +109,7 @@ protected List foldAndConvertListOfValues(List expressions, } protected boolean areCompatible(DataType left, DataType right) { - return DataTypes.areCompatible(left, right); + return DataType.areCompatible(left, right); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java index 084bdb06ca6d2..4e7e70685dc3a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.Objects; @@ -42,7 +41,7 @@ public boolean caseInsensitive() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexCompatibility.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexCompatibility.java index e0ee089680254..6cc0816661f01 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexCompatibility.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexCompatibility.java @@ -15,7 +15,7 @@ import java.util.Map; import static org.elasticsearch.xpack.esql.core.index.VersionCompatibilityChecks.isTypeSupportedInVersion; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isPrimitive; +import static org.elasticsearch.xpack.esql.core.type.DataType.isPrimitive; import static org.elasticsearch.xpack.esql.core.type.Types.propagateUnsupportedType; public final class IndexCompatibility { @@ -27,9 +27,9 @@ public static Map compatible(Map mapping, Vers if (isPrimitive(dataType) == false) { compatible(esField.getProperties(), version); } else if (isTypeSupportedInVersion(dataType, version) == false) { - EsField field = new UnsupportedEsField(entry.getKey(), dataType.name(), null, esField.getProperties()); + EsField field = new UnsupportedEsField(entry.getKey(), dataType.nameUpper(), null, esField.getProperties()); entry.setValue(field); - propagateUnsupportedType(entry.getKey(), dataType.name(), esField.getProperties()); + propagateUnsupportedType(entry.getKey(), dataType.nameUpper(), esField.getProperties()); } } return mapping; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexResolver.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexResolver.java index 21222b5d36a55..63467eaadd8df 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexResolver.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/IndexResolver.java @@ -65,11 +65,11 @@ import static org.elasticsearch.common.Strings.hasText; import static org.elasticsearch.common.regex.Regex.simpleMatch; import static org.elasticsearch.transport.RemoteClusterAware.buildRemoteIndexName; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.OBJECT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; import static 
org.elasticsearch.xpack.esql.core.util.StringUtils.qualifyAndJoinIndices; import static org.elasticsearch.xpack.esql.core.util.StringUtils.splitQualifiedIndex; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/VersionCompatibilityChecks.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/VersionCompatibilityChecks.java index ce6d26bc3528f..e4ae4f8f0d51f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/VersionCompatibilityChecks.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/index/VersionCompatibilityChecks.java @@ -15,8 +15,8 @@ import static org.elasticsearch.Version.V_8_2_0; import static org.elasticsearch.Version.V_8_4_0; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; public final class VersionCompatibilityChecks { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java index 12b496e51fa1b..ba19a73f91c06 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java @@ -8,21 +8,16 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.function.scalar.SurrogateFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; @@ -31,27 +26,17 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThan; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThanOrEqual; import 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NullEquals; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.StringPattern; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.rule.Rule; -import org.elasticsearch.xpack.esql.core.type.DataTypes; -import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; import java.time.ZoneId; import java.util.ArrayList; -import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -60,7 +45,6 @@ import java.util.Set; import java.util.function.BiFunction; -import static java.util.Collections.emptySet; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineAnd; @@ -73,20 +57,8 @@ public final class OptimizerRules { - public static final class ConstantFolding extends OptimizerExpressionRule { - - public ConstantFolding() { - super(TransformDirection.DOWN); - } - - @Override - public Expression rule(Expression e) { - return e.foldable() ? Literal.of(e) : e; - } - } - /** - * This rule must always be placed after {@link LiteralsOnTheRight}, since it looks at TRUE/FALSE literals' existence + * This rule must always be placed after LiteralsOnTheRight, since it looks at TRUE/FALSE literals' existence * on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
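 * For example (illustrative, based on this rule's handling of a boolean function {@code fn()}):
 * {@code fn() == TRUE} becomes {@code fn()}, {@code fn() == FALSE} becomes {@code NOT fn()},
 * and {@code fn() != TRUE} likewise becomes {@code NOT fn()}.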
*/ public static final class BooleanFunctionEqualsElimination extends OptimizerExpressionRule { @@ -143,7 +115,7 @@ private static Expression simplifyAndOr(BinaryPredicate bc) { } if (FALSE.equals(l) || FALSE.equals(r)) { - return new Literal(bc.source(), Boolean.FALSE, DataTypes.BOOLEAN); + return new Literal(bc.source(), Boolean.FALSE, DataType.BOOLEAN); } if (l.semanticEquals(r)) { return l; @@ -173,7 +145,7 @@ private static Expression simplifyAndOr(BinaryPredicate bc) { if (bc instanceof Or) { if (TRUE.equals(l) || TRUE.equals(r)) { - return new Literal(bc.source(), Boolean.TRUE, DataTypes.BOOLEAN); + return new Literal(bc.source(), Boolean.TRUE, DataType.BOOLEAN); } if (FALSE.equals(l)) { @@ -218,10 +190,10 @@ private Expression simplifyNot(Not n) { Expression c = n.field(); if (TRUE.semanticEquals(c)) { - return new Literal(n.source(), Boolean.FALSE, DataTypes.BOOLEAN); + return new Literal(n.source(), Boolean.FALSE, DataType.BOOLEAN); } if (FALSE.semanticEquals(c)) { - return new Literal(n.source(), Boolean.TRUE, DataTypes.BOOLEAN); + return new Literal(n.source(), Boolean.TRUE, DataType.BOOLEAN); } Expression negated = maybeSimplifyNegatable(c); @@ -248,936 +220,6 @@ protected Expression maybeSimplifyNegatable(Expression e) { } } - public static class BinaryComparisonSimplification extends OptimizerExpressionRule { - - public BinaryComparisonSimplification() { - super(TransformDirection.DOWN); - } - - @Override - protected Expression rule(BinaryComparison bc) { - Expression l = bc.left(); - Expression r = bc.right(); - - // true for equality - if (bc instanceof Equals || bc instanceof GreaterThanOrEqual || bc instanceof LessThanOrEqual) { - if (l.nullable() == Nullability.FALSE && r.nullable() == Nullability.FALSE && l.semanticEquals(r)) { - return new Literal(bc.source(), Boolean.TRUE, DataTypes.BOOLEAN); - } - } - if (bc instanceof NullEquals) { - if (l.semanticEquals(r)) { - return new Literal(bc.source(), Boolean.TRUE, DataTypes.BOOLEAN); - } - if (Expressions.isNull(r)) { - return new IsNull(bc.source(), l); - } - } - - // false for equality - if (bc instanceof NotEquals || bc instanceof GreaterThan || bc instanceof LessThan) { - if (l.nullable() == Nullability.FALSE && r.nullable() == Nullability.FALSE && l.semanticEquals(r)) { - return new Literal(bc.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - - return bc; - } - } - - public static final class LiteralsOnTheRight extends OptimizerExpressionRule> { - - public LiteralsOnTheRight() { - super(TransformDirection.UP); - } - - @Override - public BinaryOperator rule(BinaryOperator be) { - return be.left() instanceof Literal && (be.right() instanceof Literal) == false ? be.swapLeftAndRight() : be; - } - } - - /** - * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. - * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. - * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. - * - * Since this rule can eliminate Ranges and BinaryComparisons, it should be applied before {@link CombineBinaryComparisons}. - * - * This rule doesn't perform any promotion of {@link BinaryComparison}s, that is handled by - * {@link CombineBinaryComparisons} on purpose as the resulting Range might be foldable - * (which is picked by the folding rule on the next run). 
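 * For instance, as the implementation's own inline comments below document:
 *   a = 1 AND a = 2   ->  FALSE   (a field cannot be equal to two different values at once)
 *   a = 2 AND a != 2  ->  FALSE   (clashing and conflicting)
 *   a = 2 AND a != 1  ->  a = 2   (clashing but redundant)
 *   a = 2 OR a != 2   ->  TRUE
 *   a = 2 OR a != 5   ->  a != 5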
- */ - public static final class PropagateEquals extends OptimizerExpressionRule { - - public PropagateEquals() { - super(TransformDirection.DOWN); - } - - @Override - public Expression rule(BinaryLogic e) { - if (e instanceof And) { - return propagate((And) e); - } else if (e instanceof Or) { - return propagate((Or) e); - } - return e; - } - - // combine conjunction - private static Expression propagate(And and) { - List ranges = new ArrayList<>(); - // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; - // the others go into the general 'exps'. - List equals = new ArrayList<>(); - List notEquals = new ArrayList<>(); - List inequalities = new ArrayList<>(); - List exps = new ArrayList<>(); - - boolean changed = false; - - for (Expression ex : Predicates.splitAnd(and)) { - if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof Equals || ex instanceof NullEquals) { - BinaryComparison otherEq = (BinaryComparison) ex; - // equals on different values evaluate to FALSE - // ignore date/time fields as equality comparison might actually be a range check - if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) { - for (BinaryComparison eq : equals) { - if (otherEq.left().semanticEquals(eq.left())) { - Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); - if (comp != null) { - // var cannot be equal to two different values at the same time - if (comp != 0) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - } - } - equals.add(otherEq); - } else { - exps.add(otherEq); - } - } else if (ex instanceof GreaterThan - || ex instanceof GreaterThanOrEqual - || ex instanceof LessThan - || ex instanceof LessThanOrEqual) { - BinaryComparison bc = (BinaryComparison) ex; - if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals otherNotEq) { - if (otherNotEq.right().foldable()) { - notEquals.add(otherNotEq); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - // check - for (BinaryComparison eq : equals) { - Object eqValue = eq.right().fold(); - - for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { - Range range = iterator.next(); - - if (range.value().semanticEquals(eq.left())) { - // if equals is outside the interval, evaluate the whole expression to FALSE - if (range.lower().foldable()) { - Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); - if (compare != null && ( - // eq outside the lower boundary - compare > 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeLower() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - if (range.upper().foldable()) { - Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); - if (compare != null && ( - // eq outside the upper boundary - compare < 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeUpper() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - - // it's in the range and thus, remove it - iterator.remove(); - changed = true; - } - } - - // evaluate all NotEquals against the Equal - for (Iterator iter = notEquals.iterator(); iter.hasNext();) { - NotEquals neq = iter.next(); - if (eq.left().semanticEquals(neq.left())) { - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - 
if (comp != null) { - if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } else { // clashing and redundant: a = 1 AND a != 2 - iter.remove(); - changed = true; - } - } - } - } - - // evaluate all inequalities against the Equal - for (Iterator iter = inequalities.iterator(); iter.hasNext();) { - BinaryComparison bc = iter.next(); - if (eq.left().semanticEquals(bc.left())) { - Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); - if (compare != null) { - if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a </<= ? - if ((compare == 0 && bc instanceof LessThan) || // a = 2 AND a < 2 - 0 < compare) { // a = 2 AND a </<= 1 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { // a = 2 AND a >/>= ? - if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 - compare < 0) { // a = 2 AND a >/>= 3 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - - iter.remove(); - changed = true; - } - } - } - } - - return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; - } - - // combine disjunction: - // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 - // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop - // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop - // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 - private static Expression propagate(Or or) { - List exps = new ArrayList<>(); - List equals = new ArrayList<>(); // foldable right term Equals - List notEquals = new ArrayList<>(); // foldable right term NotEquals - List ranges = new ArrayList<>(); - List inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparison - - // split expressions by type - for (Expression ex : Predicates.splitOr(or)) { - if (ex instanceof Equals eq) { - if (eq.right().foldable()) { - equals.add(eq); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals neq) { - if (neq.right().foldable()) { - notEquals.add(neq); - } else { - exps.add(ex); - } - } else if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof BinaryComparison bc) { - if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - boolean updated = false; // has the expression been modified? - - // evaluate the impact of each Equal over the different types of Expressions - for (Iterator iterEq = equals.iterator(); iterEq.hasNext();) { - Equals eq = iterEq.next(); - Object eqValue = eq.right().fold(); - boolean removeEquals = false; - - // Equals OR NotEquals - for (NotEquals neq : notEquals) { - if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - if (comp != null) { - if (comp == 0) { // a = 2 OR a != 2 -> TRUE - return TRUE; - } else { // a = 2 OR a != 5 -> a != 5 - removeEquals = true; - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Range - for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop - Range range = ranges.get(i); - if (eq.left().semanticEquals(range.value())) { - Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; - Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; - - if (lowerComp != null && lowerComp == 0) { - if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? 
- ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - true, - range.upper(), - range.includeUpper(), - range.zoneId() - ) - ); - } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? - removeEquals = true; // update range with lower equality instead or simply superfluous - break; - } else if (upperComp != null && upperComp == 0) { - if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2 - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - range.includeLower(), - range.upper(), - true, - range.zoneId() - ) - ); - } // else : a = 2 OR ? < a <= 2 -> ? < a <= 2 - removeEquals = true; // update range with upper equality instead - break; - } else if (lowerComp != null && upperComp != null) { - if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 - removeEquals = true; // equality is superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Inequality - for (int i = 0; i < inequalities.size(); i++) { - BinaryComparison bc = inequalities.get(i); - if (eq.left().semanticEquals(bc.left())) { - Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); - if (comp != null) { - if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - if (comp < 0) { // a = 1 OR a > 2 -> nop - continue; - } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 - inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (0 < comp || bc instanceof GreaterThanOrEqual) : - // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 - - removeEquals = true; // update range with equality instead or simply superfluous - break; - } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - if (comp > 0) { // a = 2 OR a < 1 -> nop - continue; - } - if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 - inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 - removeEquals = true; // update range with equality instead or simply superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - } - } - - return updated ? Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; - } - } - - public static final class CombineBinaryComparisons extends OptimizerExpressionRule { - - public CombineBinaryComparisons() { - super(TransformDirection.DOWN); - } - - @Override - public Expression rule(BinaryLogic e) { - if (e instanceof And) { - return combine((And) e); - } else if (e instanceof Or) { - return combine((Or) e); - } - return e; - } - - // combine conjunction - private static Expression combine(And and) { - List ranges = new ArrayList<>(); - List bcs = new ArrayList<>(); - List exps = new ArrayList<>(); - - boolean changed = false; - - List andExps = Predicates.splitAnd(and); - // Ranges need to show up before BinaryComparisons in list, to allow the latter be optimized away into a Range, if possible. - // NotEquals need to be last in list, to have a complete set of Ranges (ranges) and BinaryComparisons (bcs) and allow these to - // optimize the NotEquals away. 
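// For example (cases taken from the inline comments of the code below):
//   a > 2 AND a < 5        ->  2 < a < 5   (two comparisons merged into a Range)
//   a > 2 AND a >= 2       ->  a > 2       (the weaker bound is discarded)
//   a != 3 AND 2 < a <= 3  ->  2 < a < 3   (the NotEquals is folded into the range boundary)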
- andExps.sort((o1, o2) -> { - if (o1 instanceof Range && o2 instanceof Range) { - return 0; // keep ranges' order - } else if (o1 instanceof Range || o2 instanceof Range) { - return o2 instanceof Range ? 1 : -1; // push Ranges down - } else if (o1 instanceof NotEquals && o2 instanceof NotEquals) { - return 0; // keep NotEquals' order - } else if (o1 instanceof NotEquals || o2 instanceof NotEquals) { - return o1 instanceof NotEquals ? 1 : -1; // push NotEquals up - } else { - return 0; // keep non-Ranges' and non-NotEquals' order - } - }); - for (Expression ex : andExps) { - if (ex instanceof Range r) { - if (findExistingRange(r, ranges, true)) { - changed = true; - } else { - ranges.add(r); - } - } else if (ex instanceof BinaryComparison bc && (ex instanceof Equals || ex instanceof NotEquals) == false) { - - if (bc.right().foldable() && (findConjunctiveComparisonInRange(bc, ranges) || findExistingComparison(bc, bcs, true))) { - changed = true; - } else { - bcs.add(bc); - } - } else if (ex instanceof NotEquals neq) { - if (neq.right().foldable() && notEqualsIsRemovableFromConjunction(neq, ranges, bcs)) { - // the non-equality can simply be dropped: either superfluous or has been merged with an updated range/inequality - changed = true; - } else { // not foldable OR not overlapping - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - // finally try combining any left BinaryComparisons into possible Ranges - // this could be a different rule but it's clearer here wrt the order of comparisons - - for (int i = 0, step = 1; i < bcs.size() - 1; i += step, step = 1) { - BinaryComparison main = bcs.get(i); - - for (int j = i + 1; j < bcs.size(); j++) { - BinaryComparison other = bcs.get(j); - - if (main.left().semanticEquals(other.left())) { - // >/>= AND </<= - if ((main instanceof GreaterThan || main instanceof GreaterThanOrEqual) - && (other instanceof LessThan || other instanceof LessThanOrEqual)) { - bcs.remove(j); - bcs.remove(i); - - ranges.add( - new Range( - and.source(), - main.left(), - main.right(), - main instanceof GreaterThanOrEqual, - other.right(), - other instanceof LessThanOrEqual, - main.zoneId() - ) - ); - - changed = true; - step = 0; - break; - } - // </<= AND >/>= - else if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual) - && (main instanceof LessThan || main instanceof LessThanOrEqual)) { - bcs.remove(j); - bcs.remove(i); - - ranges.add( - new Range( - and.source(), - main.left(), - other.right(), - other instanceof GreaterThanOrEqual, - main.right(), - main instanceof LessThanOrEqual, - main.zoneId() - ) - ); - - changed = true; - step = 0; - break; - } - } - } - } - - return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, bcs, ranges)) : and; - } - - // combine disjunction - private static Expression combine(Or or) { - List bcs = new ArrayList<>(); - List ranges = new ArrayList<>(); - List exps = new ArrayList<>(); - - boolean changed = false; - - for (Expression ex : Predicates.splitOr(or)) { - if (ex instanceof Range r) { - if (findExistingRange(r, ranges, false)) { - changed = true; - } else { - ranges.add(r); - } - } else if (ex instanceof BinaryComparison bc) { - if (bc.right().foldable() && findExistingComparison(bc, bcs, false)) { - changed = true; - } else { - bcs.add(bc); - } - } else { - exps.add(ex); - } - } - - return changed ? 
Predicates.combineOr(CollectionUtils.combine(exps, bcs, ranges)) : or; - } - - private static boolean findExistingRange(Range main, List ranges, boolean conjunctive) { - if (main.lower().foldable() == false && main.upper().foldable() == false) { - return false; - } - // NB: the loop modifies the list (hence why the int is used) - for (int i = 0; i < ranges.size(); i++) { - Range other = ranges.get(i); - - if (main.value().semanticEquals(other.value())) { - - // make sure the comparison was done - boolean compared = false; - - boolean lower = false; - boolean upper = false; - // boundary equality (useful to differentiate whether a range is included or not) - // and thus whether it should be preserved or ignored - boolean lowerEq = false; - boolean upperEq = false; - - // evaluate lower - if (main.lower().foldable() && other.lower().foldable()) { - compared = true; - - Integer comp = BinaryComparison.compare(main.lower().fold(), other.lower().fold()); - // values are comparable - if (comp != null) { - // boundary equality - lowerEq = comp == 0 && main.includeLower() == other.includeLower(); - // AND - if (conjunctive) { - // (2 < a < 3) AND (1 < a < 3) -> (2 < a < 3) - lower = comp > 0 || - // (2 < a < 3) AND (2 <= a < 3) -> (2 < a < 3) - (comp == 0 && main.includeLower() == false && other.includeLower()); - } - // OR - else { - // (1 < a < 3) OR (2 < a < 3) -> (1 < a < 3) - lower = comp < 0 || - // (2 <= a < 3) OR (2 < a < 3) -> (2 <= a < 3) - (comp == 0 && main.includeLower() && other.includeLower() == false) || lowerEq; - } - } - } - // evaluate upper - if (main.upper().foldable() && other.upper().foldable()) { - compared = true; - - Integer comp = BinaryComparison.compare(main.upper().fold(), other.upper().fold()); - // values are comparable - if (comp != null) { - // boundary equality - upperEq = comp == 0 && main.includeUpper() == other.includeUpper(); - - // AND - if (conjunctive) { - // (1 < a < 2) AND (1 < a < 3) -> (1 < a < 2) - upper = comp < 0 || - // (1 < a < 2) AND (1 < a <= 2) -> (1 < a < 2) - (comp == 0 && main.includeUpper() == false && other.includeUpper()); - } - // OR - else { - // (1 < a < 3) OR (1 < a < 2) -> (1 < a < 3) - upper = comp > 0 || - // (1 < a <= 3) OR (1 < a < 3) -> (2 < a < 3) - (comp == 0 && main.includeUpper() && other.includeUpper() == false) || upperEq; - } - } - } - - // AND - at least one of lower or upper - if (conjunctive) { - // can tighten range - if (lower || upper) { - ranges.set( - i, - new Range( - main.source(), - main.value(), - lower ? main.lower() : other.lower(), - lower ? main.includeLower() : other.includeLower(), - upper ? main.upper() : other.upper(), - upper ? 
main.includeUpper() : other.includeUpper(), - main.zoneId() - ) - ); - } - - // range was comparable - return compared; - } - // OR - needs both upper and lower to loosen range - else { - // can loosen range - if (lower && upper) { - ranges.set( - i, - new Range( - main.source(), - main.value(), - main.lower(), - main.includeLower(), - main.upper(), - main.includeUpper(), - main.zoneId() - ) - ); - return true; - } - - // if the range in included, no need to add it - return compared && (((lower && lowerEq == false) || (upper && upperEq == false)) == false); - } - } - } - return false; - } - - private static boolean findConjunctiveComparisonInRange(BinaryComparison main, List ranges) { - Object value = main.right().fold(); - - // NB: the loop modifies the list (hence why the int is used) - for (int i = 0; i < ranges.size(); i++) { - Range other = ranges.get(i); - - if (main.left().semanticEquals(other.value())) { - - if (main instanceof GreaterThan || main instanceof GreaterThanOrEqual) { - if (other.lower().foldable()) { - Integer comp = BinaryComparison.compare(value, other.lower().fold()); - if (comp != null) { - // 2 < a AND (2 <= a < 3) -> 2 < a < 3 - boolean lowerEq = comp == 0 && other.includeLower() && main instanceof GreaterThan; - // 2 < a AND (1 < a < 3) -> 2 < a < 3 - boolean lower = comp > 0 || lowerEq; - - if (lower) { - ranges.set( - i, - new Range( - other.source(), - other.value(), - main.right(), - lowerEq ? false : main instanceof GreaterThanOrEqual, - other.upper(), - other.includeUpper(), - other.zoneId() - ) - ); - } - - // found a match - return true; - } - } - } else if (main instanceof LessThan || main instanceof LessThanOrEqual) { - if (other.upper().foldable()) { - Integer comp = BinaryComparison.compare(value, other.upper().fold()); - if (comp != null) { - // a < 2 AND (1 < a <= 2) -> 1 < a < 2 - boolean upperEq = comp == 0 && other.includeUpper() && main instanceof LessThan; - // a < 2 AND (1 < a < 3) -> 1 < a < 2 - boolean upper = comp < 0 || upperEq; - - if (upper) { - ranges.set( - i, - new Range( - other.source(), - other.value(), - other.lower(), - other.includeLower(), - main.right(), - upperEq ? false : main instanceof LessThanOrEqual, - other.zoneId() - ) - ); - } - - // found a match - return true; - } - } - } - - return false; - } - } - return false; - } - - /** - * Find commonalities between the given comparison in the given list. - * The method can be applied both for conjunctive (AND) or disjunctive purposes (OR). 
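 * For instance, per the method's inline comments: conjunctive, a < 2 AND a < 3 keeps a < 2
 * (the stricter bound); disjunctive, a < 2 OR a < 3 keeps a < 3 (the looser bound).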
- */ - private static boolean findExistingComparison(BinaryComparison main, List bcs, boolean conjunctive) { - Object value = main.right().fold(); - - // NB: the loop modifies the list (hence why the int is used) - for (int i = 0; i < bcs.size(); i++) { - BinaryComparison other = bcs.get(i); - // skip if cannot evaluate - if (other.right().foldable() == false) { - continue; - } - // if bc is a higher/lower value or gte vs gt, use it instead - if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual) - && (main instanceof GreaterThan || main instanceof GreaterThanOrEqual)) { - - if (main.left().semanticEquals(other.left())) { - Integer compare = BinaryComparison.compare(value, other.right().fold()); - - if (compare != null) { - // AND - if ((conjunctive && - // a > 3 AND a > 2 -> a > 3 - (compare > 0 || - // a > 2 AND a >= 2 -> a > 2 - (compare == 0 && main instanceof GreaterThan && other instanceof GreaterThanOrEqual))) || - // OR - (conjunctive == false && - // a > 2 OR a > 3 -> a > 2 - (compare < 0 || - // a >= 2 OR a > 2 -> a >= 2 - (compare == 0 && main instanceof GreaterThanOrEqual && other instanceof GreaterThan)))) { - bcs.remove(i); - bcs.add(i, main); - } - // found a match - return true; - } - - return false; - } - } - // if bc is a lower/higher value or lte vs lt, use it instead - else if ((other instanceof LessThan || other instanceof LessThanOrEqual) - && (main instanceof LessThan || main instanceof LessThanOrEqual)) { - - if (main.left().semanticEquals(other.left())) { - Integer compare = BinaryComparison.compare(value, other.right().fold()); - - if (compare != null) { - // AND - if ((conjunctive && - // a < 2 AND a < 3 -> a < 2 - (compare < 0 || - // a < 2 AND a <= 2 -> a < 2 - (compare == 0 && main instanceof LessThan && other instanceof LessThanOrEqual))) || - // OR - (conjunctive == false && - // a < 2 OR a < 3 -> a < 3 - (compare > 0 || - // a <= 2 OR a < 2 -> a <= 2 - (compare == 0 && main instanceof LessThanOrEqual && other instanceof LessThan)))) { - bcs.remove(i); - bcs.add(i, main); - - } - // found a match - return true; - } - - return false; - } - } - } - - return false; - } - - private static boolean notEqualsIsRemovableFromConjunction(NotEquals notEquals, List ranges, List bcs) { - Object neqVal = notEquals.right().fold(); - Integer comp; - - // check on "condition-overlapping" ranges: - // a != 2 AND 3 < a < 5 -> 3 < a < 5; a != 2 AND 0 < a < 1 -> 0 < a < 1 (discard NotEquals) - // a != 2 AND 2 <= a < 3 -> 2 < a < 3; a != 3 AND 2 < a <= 3 -> 2 < a < 3 (discard NotEquals, plus update Range) - // a != 2 AND 1 < a < 3 -> nop (do nothing) - for (int i = 0; i < ranges.size(); i++) { - Range range = ranges.get(i); - - if (notEquals.left().semanticEquals(range.value())) { - comp = range.lower().foldable() ? BinaryComparison.compare(neqVal, range.lower().fold()) : null; - if (comp != null) { - if (comp <= 0) { - if (comp == 0 && range.includeLower()) { // a != 2 AND 2 <= a < ? -> 2 < a < ? - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - false, - range.upper(), - range.includeUpper(), - range.zoneId() - ) - ); - } - // else: !.includeLower() : a != 2 AND 2 < a < 3 -> 2 < a < 3; or: - // else: comp < 0 : a != 2 AND 3 < a < ? -> 3 < a < ? - - return true; - } else { // comp > 0 : a != 4 AND 2 < a < ? : can only remove NotEquals if outside the range - comp = range.upper().foldable() ? 
BinaryComparison.compare(neqVal, range.upper().fold()) : null; - if (comp != null && comp >= 0) { - if (comp == 0 && range.includeUpper()) { // a != 4 AND 2 < a <= 4 -> 2 < a < 4 - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - range.includeLower(), - range.upper(), - false, - range.zoneId() - ) - ); - } - // else: !.includeUpper() : a != 4 AND 2 < a < 4 -> 2 < a < 4 - // else: comp > 0 : a != 4 AND 2 < a < 3 -> 2 < a < 3 - - return true; - } - // else: comp < 0 : a != 4 AND 2 < a < 5 -> nop; or: - // else: comp == null : upper bound not comparable -> nop - } - } // else: comp == null : lower bound not comparable: evaluate upper bound, in case non-equality value is ">=" - - comp = range.upper().foldable() ? BinaryComparison.compare(neqVal, range.upper().fold()) : null; - if (comp != null && comp >= 0) { - if (comp == 0 && range.includeUpper()) { // a != 3 AND ?? < a <= 3 -> ?? < a < 3 - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - range.includeLower(), - range.upper(), - false, - range.zoneId() - ) - ); - } - // else: !.includeUpper() : a != 3 AND ?? < a < 3 -> ?? < a < 3 - // else: comp > 0 : a != 3 and ?? < a < 2 -> ?? < a < 2 - - return true; - } - // else: comp < 0 : a != 3 AND ?? < a < 4 -> nop, as a decision can't be drawn; or: - // else: comp == null : a != 3 AND ?? < a < ?? -> nop - } - } - - // check on "condition-overlapping" inequalities: - // a != 2 AND a > 3 -> a > 3 (discard NotEquals) - // a != 2 AND a >= 2 -> a > 2 (discard NotEquals plus update inequality) - // a != 2 AND a > 1 -> nop (do nothing) - // - // a != 2 AND a < 3 -> nop - // a != 2 AND a <= 2 -> a < 2 - // a != 2 AND a < 1 -> a < 1 - for (int i = 0; i < bcs.size(); i++) { - BinaryComparison bc = bcs.get(i); - - if (notEquals.left().semanticEquals(bc.left())) { - if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold()) : null; - if (comp != null) { - if (comp >= 0) { - if (comp == 0 && bc instanceof LessThanOrEqual) { // a != 2 AND a <= 2 -> a < 2 - bcs.set(i, new LessThan(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else : comp > 0 (a != 2 AND a </<= 1 -> a </<= 1), or == 0 && bc i.of "<" (a != 2 AND a < 2 -> a < 2) - return true; - } // else: comp < 0 : a != 2 AND a </<= 3 -> nop - } // else: non-comparable, nop - } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold()) : null; - if (comp != null) { - if (comp <= 0) { - if (comp == 0 && bc instanceof GreaterThanOrEqual) { // a != 2 AND a >= 2 -> a > 2 - bcs.set(i, new GreaterThan(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else: comp < 0 (a != 2 AND a >/>= 3 -> a >/>= 3), or == 0 && bc i.of ">" (a != 2 AND a > 2 -> a > 2) - return true; - } // else: comp > 0 : a != 2 AND a >/>= 1 -> nop - } // else: non-comparable, nop - } // else: other non-relevant type - } - } - - return false; - } - - } - /** * Combine disjunctions on the same field into an In expression. 
* This rule looks for both simple equalities: @@ -1296,7 +338,7 @@ private static Expression foldBinaryLogic(BinaryLogic binaryLogic) { boolean nullLeft = Expressions.isNull(or.left()); boolean nullRight = Expressions.isNull(or.right()); if (nullLeft && nullRight) { - return new Literal(binaryLogic.source(), null, DataTypes.NULL); + return new Literal(binaryLogic.source(), null, DataType.NULL); } if (nullLeft) { return or.right(); @@ -1307,39 +349,13 @@ private static Expression foldBinaryLogic(BinaryLogic binaryLogic) { } if (binaryLogic instanceof And and) { if (Expressions.isNull(and.left()) || Expressions.isNull(and.right())) { - return new Literal(binaryLogic.source(), null, DataTypes.NULL); + return new Literal(binaryLogic.source(), null, DataType.NULL); } } return binaryLogic; } } - public static final class PruneLiteralsInOrderBy extends OptimizerRule { - - @Override - protected LogicalPlan rule(OrderBy ob) { - List prunedOrders = new ArrayList<>(); - - for (Order o : ob.order()) { - if (o.child().foldable()) { - prunedOrders.add(o); - } - } - - // everything was eliminated, the order isn't needed anymore - if (prunedOrders.size() == ob.order().size()) { - return ob.child(); - } - if (prunedOrders.size() > 0) { - List newOrders = new ArrayList<>(ob.order()); - newOrders.removeAll(prunedOrders); - return new OrderBy(ob.source(), ob.child(), newOrders); - } - - return ob; - } - } - // NB: it is important to start replacing casts from the bottom to properly replace aliases public abstract static class PruneCast extends Rule { @@ -1376,33 +392,6 @@ protected LogicalPlan rule(Limit limit) { protected abstract LogicalPlan skipPlan(Limit limit); } - public static class ReplaceRegexMatch extends OptimizerExpressionRule> { - - public ReplaceRegexMatch() { - super(TransformDirection.DOWN); - } - - @Override - protected Expression rule(RegexMatch regexMatch) { - Expression e = regexMatch; - StringPattern pattern = regexMatch.pattern(); - if (pattern.matchesAll()) { - e = new IsNotNull(e.source(), regexMatch.field()); - } else { - String match = pattern.exactMatch(); - if (match != null) { - Literal literal = new Literal(regexMatch.source(), match, DataTypes.KEYWORD); - e = regexToEquals(regexMatch, literal); - } - } - return e; - } - - protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { - return new Equals(regexMatch.source(), regexMatch.field(), literal); - } - } - public static class FoldNull extends OptimizerExpressionRule { public FoldNull() { @@ -1410,7 +399,7 @@ public FoldNull() { } @Override - protected Expression rule(Expression e) { + public Expression rule(Expression e) { Expression result = tryReplaceIsNullIsNotNull(e); if (result != e) { return result; @@ -1429,11 +418,11 @@ protected Expression rule(Expression e) { protected Expression tryReplaceIsNullIsNotNull(Expression e) { if (e instanceof IsNotNull isnn) { if (isnn.field().nullable() == Nullability.FALSE) { - return new Literal(e.source(), Boolean.TRUE, DataTypes.BOOLEAN); + return new Literal(e.source(), Boolean.TRUE, DataType.BOOLEAN); } } else if (e instanceof IsNull isn) { if (isn.field().nullable() == Nullability.FALSE) { - return new Literal(e.source(), Boolean.FALSE, DataTypes.BOOLEAN); + return new Literal(e.source(), Boolean.FALSE, DataType.BOOLEAN); } } return e; @@ -1450,7 +439,7 @@ public PropagateNullable() { } @Override - protected Expression rule(And and) { + public Expression rule(And and) { List splits = Predicates.splitAnd(and); Set nullExpressions = new LinkedHashSet<>(); @@ 
-1529,108 +518,6 @@ protected Expression nonNullify(Expression exp, Expression nonNullExp) { } } - /** - * Simplify IsNotNull targets by resolving the underlying expression to its root fields with unknown - * nullability. - * e.g. - * (x + 1) / 2 IS NOT NULL --> x IS NOT NULL AND (x+1) / 2 IS NOT NULL - * SUBSTRING(x, 3) > 4 IS NOT NULL --> x IS NOT NULL AND SUBSTRING(x, 3) > 4 IS NOT NULL - * When dealing with multiple fields, a conjunction/disjunction based on the predicate: - * (x + y) / 4 IS NOT NULL --> x IS NOT NULL AND y IS NOT NULL AND (x + y) / 4 IS NOT NULL - * This handles the case of fields nested inside functions or expressions in order to avoid: - * - having to evaluate the whole expression - * - not pushing down the filter due to expression evaluation - * IS NULL cannot be simplified since it leads to a disjunction which prevents the filter to be - * pushed down: - * (x + 1) IS NULL --> x IS NULL OR x + 1 IS NULL - * and x IS NULL cannot be pushed down - *
- * Implementation-wise this rule goes bottom-up, keeping an alias up to date to the current plan - * and then looks for replacing the target. - */ - public static class InferIsNotNull extends Rule { - - @Override - public LogicalPlan apply(LogicalPlan plan) { - // the alias map is shared across the whole plan - AttributeMap aliases = new AttributeMap<>(); - // traverse bottom-up to pick up the aliases as we go - plan = plan.transformUp(p -> inspectPlan(p, aliases)); - return plan; - } - - private LogicalPlan inspectPlan(LogicalPlan plan, AttributeMap aliases) { - // inspect just this plan properties - plan.forEachExpression(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); - // now go about finding isNull/isNotNull - LogicalPlan newPlan = plan.transformExpressionsOnlyUp(IsNotNull.class, inn -> inferNotNullable(inn, aliases)); - return newPlan; - } - - private Expression inferNotNullable(IsNotNull inn, AttributeMap aliases) { - Expression result = inn; - Set refs = resolveExpressionAsRootAttributes(inn.field(), aliases); - // no refs found or could not detect - return the original function - if (refs.size() > 0) { - // add IsNull for the filters along with the initial inn - var innList = CollectionUtils.combine(refs.stream().map(r -> (Expression) new IsNotNull(inn.source(), r)).toList(), inn); - result = Predicates.combineAnd(innList); - } - return result; - } - - /** - * Unroll the expression to its references to get to the root fields - * that really matter for filtering. - */ - protected Set resolveExpressionAsRootAttributes(Expression exp, AttributeMap aliases) { - Set resolvedExpressions = new LinkedHashSet<>(); - boolean changed = doResolve(exp, aliases, resolvedExpressions); - return changed ? resolvedExpressions : emptySet(); - } - - private boolean doResolve(Expression exp, AttributeMap aliases, Set resolvedExpressions) { - boolean changed = false; - // check if the expression can be skipped or is not nullabe - if (skipExpression(exp)) { - resolvedExpressions.add(exp); - } else { - for (Expression e : exp.references()) { - Expression resolved = aliases.resolve(e, e); - // found a root attribute, bail out - if (resolved instanceof Attribute a && resolved == e) { - resolvedExpressions.add(a); - // don't mark things as change if the original expression hasn't been broken down - changed |= resolved != exp; - } else { - // go further - changed |= doResolve(resolved, aliases, resolvedExpressions); - } - } - } - return changed; - } - - protected boolean skipExpression(Expression e) { - return e.nullable() == Nullability.FALSE; - } - } - - public static final class SetAsOptimized extends Rule { - - @Override - public LogicalPlan apply(LogicalPlan plan) { - plan.forEachUp(SetAsOptimized::rule); - return plan; - } - - private static void rule(LogicalPlan plan) { - if (plan.optimized() == false) { - plan.setOptimized(); - } - } - } - public abstract static class OptimizerRule extends Rule { private final TransformDirection direction; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index cf19b8722b258..6eab4a0cd9a75 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -52,7 +52,6 @@ import 
org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.versionfield.Version; @@ -67,9 +66,9 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; public final class ExpressionTranslators { @@ -283,7 +282,7 @@ static Query translate(BinaryComparison bc, TranslatorHandler handler) { } ZoneId zoneId = null; - if (DataTypes.isDateTime(attribute.dataType())) { + if (DataType.isDateTime(attribute.dataType())) { zoneId = bc.zoneId(); } if (bc instanceof GreaterThan) { @@ -379,7 +378,7 @@ public static Query doTranslate(In in, TranslatorHandler handler) { } private static boolean needsTypeSpecificValueHandling(DataType fieldType) { - return DataTypes.isDateTime(fieldType) || fieldType == IP || fieldType == VERSION || fieldType == UNSIGNED_LONG; + return DataType.isDateTime(fieldType) || fieldType == IP || fieldType == VERSION || fieldType == UNSIGNED_LONG; } private static Query translate(In in, TranslatorHandler handler) { @@ -389,7 +388,7 @@ private static Query translate(In in, TranslatorHandler handler) { List queries = new ArrayList<>(); for (Expression rhs : in.list()) { - if (DataTypes.isNull(rhs.dataType()) == false) { + if (DataType.isNull(rhs.dataType()) == false) { if (needsTypeSpecificValueHandling(attribute.dataType())) { // delegates to BinaryComparisons translator to ensure consistent handling of date and time values Query query = BinaryComparisons.translate(new Equals(in.source(), in.value(), rhs, in.zoneId()), handler); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 1dacc394c8d21..9d6a325a6028f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -4,19 +4,65 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ - package org.elasticsearch.xpack.esql.core.type; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.sort.ScriptSortBuilder; +import org.elasticsearch.index.mapper.SourceFieldMapper; import java.io.IOException; +import java.math.BigInteger; +import java.time.ZonedDateTime; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; import java.util.Locale; -import java.util.Objects; +import java.util.Map; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Collectors.toUnmodifiableMap; + +public enum DataType { + UNSUPPORTED("UNSUPPORTED", null, 0, false, false, false), + NULL("null", 0, false, false, false), + BOOLEAN("boolean", 1, false, false, false), + BYTE("byte", Byte.BYTES, true, false, true), + SHORT("short", Short.BYTES, true, false, true), + INTEGER("integer", Integer.BYTES, true, false, true), + LONG("long", Long.BYTES, true, false, true), + UNSIGNED_LONG("unsigned_long", Long.BYTES, true, false, true), + DOUBLE("double", Double.BYTES, false, true, true), + FLOAT("float", Float.BYTES, false, true, true), + HALF_FLOAT("half_float", Float.BYTES, false, true, true), + SCALED_FLOAT("scaled_float", Long.BYTES, false, true, true), + KEYWORD("keyword", Integer.MAX_VALUE, false, false, true), + TEXT("text", Integer.MAX_VALUE, false, false, false), + DATETIME("DATETIME", "date", Long.BYTES, false, false, true), + IP("ip", 45, false, false, true), + VERSION("version", Integer.MAX_VALUE, false, false, true), + OBJECT("object", 0, false, false, false), + NESTED("nested", 0, false, false, false), + SOURCE(SourceFieldMapper.NAME, SourceFieldMapper.NAME, Integer.MAX_VALUE, false, false, false), + DATE_PERIOD("DATE_PERIOD", null, 3 * Integer.BYTES, false, false, false), + TIME_DURATION("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false), + GEO_POINT("geo_point", Double.BYTES * 2, false, false, true), + CARTESIAN_POINT("cartesian_point", Double.BYTES * 2, false, false, true), + CARTESIAN_SHAPE("cartesian_shape", Integer.MAX_VALUE, false, false, true), + GEO_SHAPE("geo_shape", Integer.MAX_VALUE, false, false, true), -public class DataType implements Writeable { + /** + * These are numeric fields labeled as metric counters in time-series indices. Although stored + * internally as numeric fields, they represent cumulative metrics and must not be treated as regular + * numeric fields. Therefore, we define them differently and separately from their parent numeric field. + * These fields are strictly for use in retrieval from indices, rate aggregation, and casting to their + * parent numeric type. 
+ */ + COUNTER_LONG("counter_long", Long.BYTES, false, false, true), + COUNTER_INTEGER("counter_integer", Integer.BYTES, false, false, true), + COUNTER_DOUBLE("counter_double", Double.BYTES, false, false, true), + DOC_DATA_TYPE("_doc", Integer.BYTES * 3, false, false, false), + TSID_DATA_TYPE("_tsid", Integer.MAX_VALUE, false, false, true); private final String typeName; @@ -41,11 +87,11 @@ public class DataType implements Writeable { */ private final boolean docValues; - public DataType(String esName, int size, boolean isInteger, boolean isRational, boolean hasDocValues) { + DataType(String esName, int size, boolean isInteger, boolean isRational, boolean hasDocValues) { this(null, esName, size, isInteger, isRational, hasDocValues); } - public DataType(String typeName, String esType, int size, boolean isInteger, boolean isRational, boolean hasDocValues) { + DataType(String typeName, String esType, int size, boolean isInteger, boolean isRational, boolean hasDocValues) { String typeString = typeName != null ? typeName : esType; this.typeName = typeString.toLowerCase(Locale.ROOT); this.name = typeString.toUpperCase(Locale.ROOT); @@ -56,7 +102,139 @@ public DataType(String typeName, String esType, int size, boolean isInteger, boo this.docValues = hasDocValues; } - public String name() { + private static final Collection TYPES = Stream.of( + UNSUPPORTED, + NULL, + BOOLEAN, + BYTE, + SHORT, + INTEGER, + LONG, + UNSIGNED_LONG, + DOUBLE, + FLOAT, + HALF_FLOAT, + SCALED_FLOAT, + KEYWORD, + TEXT, + DATETIME, + IP, + VERSION, + OBJECT, + NESTED, + SOURCE, + DATE_PERIOD, + TIME_DURATION, + GEO_POINT, + CARTESIAN_POINT, + CARTESIAN_SHAPE, + GEO_SHAPE, + COUNTER_LONG, + COUNTER_INTEGER, + COUNTER_DOUBLE + ).sorted(Comparator.comparing(DataType::typeName)).toList(); + + private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); + + private static Map ES_TO_TYPE; + + static { + Map map = TYPES.stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); + map.put("date_nanos", DATETIME); + ES_TO_TYPE = Collections.unmodifiableMap(map); + } + + public static Collection types() { + return TYPES; + } + + public static DataType fromTypeName(String name) { + return NAME_TO_TYPE.get(name.toLowerCase(Locale.ROOT)); + } + + public static DataType fromEs(String name) { + DataType type = ES_TO_TYPE.get(name); + return type != null ? 
type : UNSUPPORTED; + } + + public static DataType fromJava(Object value) { + if (value == null) { + return NULL; + } + if (value instanceof Integer) { + return INTEGER; + } + if (value instanceof Long) { + return LONG; + } + if (value instanceof BigInteger) { + return UNSIGNED_LONG; + } + if (value instanceof Boolean) { + return BOOLEAN; + } + if (value instanceof Double) { + return DOUBLE; + } + if (value instanceof Float) { + return FLOAT; + } + if (value instanceof Byte) { + return BYTE; + } + if (value instanceof Short) { + return SHORT; + } + if (value instanceof ZonedDateTime) { + return DATETIME; + } + if (value instanceof String || value instanceof Character) { + return KEYWORD; + } + + return null; + } + + public static boolean isUnsupported(DataType from) { + return from == UNSUPPORTED; + } + + public static boolean isString(DataType t) { + return t == KEYWORD || t == TEXT; + } + + public static boolean isPrimitive(DataType t) { + return t != OBJECT && t != NESTED && t != UNSUPPORTED; + } + + public static boolean isNull(DataType t) { + return t == NULL; + } + + public static boolean isNullOrNumeric(DataType t) { + return t.isNumeric() || isNull(t); + } + + public static boolean isSigned(DataType t) { + return t.isNumeric() && t.equals(UNSIGNED_LONG) == false; + } + + public static boolean isDateTime(DataType type) { + return type == DATETIME; + } + + public static boolean areCompatible(DataType left, DataType right) { + if (left == right) { + return true; + } else { + return (left == NULL || right == NULL) + || (isString(left) && isString(right)) + || (left.isNumeric() && right.isNumeric()) + || (isDateTime(left) && isDateTime(right)); + } + } + + public String nameUpper() { return name; } @@ -68,12 +246,6 @@ public String esType() { return esType; } - public ScriptSortBuilder.ScriptSortType scriptSortType() { - return isNumeric() ? ScriptSortBuilder.ScriptSortType.NUMBER - : this == DataTypes.VERSION ? 
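A few concrete lookups, sketching how the static helpers that moved onto the enum behave; the results follow directly from the definitions above ("wibble" is a made-up mapping type):

    DataType.fromEs("date");            // DATETIME: "date" is the ES name, DATETIME the type name
    DataType.fromEs("date_nanos");      // DATETIME too, via the extra ES_TO_TYPE entry
    DataType.fromEs("wibble");          // UNSUPPORTED: unknown ES types fall back
    DataType.fromTypeName("DateTime");  // DATETIME: the lookup lower-cases the name
    DataType.fromJava(java.math.BigInteger.ONE);              // UNSIGNED_LONG
    DataType.areCompatible(DataType.LONG, DataType.DOUBLE);   // true, both numeric
    DataType.areCompatible(DataType.KEYWORD, DataType.LONG);  // false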
ScriptSortBuilder.ScriptSortType.VERSION - : ScriptSortBuilder.ScriptSortType.STRING; - } - public boolean isInteger() { return isInteger; } @@ -94,49 +266,20 @@ public boolean hasDocValues() { return docValues; } - @Override - public int hashCode() { - return Objects.hash(typeName, esType, size, isInteger, isRational, docValues); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DataType other = (DataType) obj; - return Objects.equals(typeName, other.typeName) - && Objects.equals(esType, other.esType) - && size == other.size - && isInteger == other.isInteger - && isRational == other.isRational - && docValues == other.docValues; - } - - @Override - public String toString() { - return name; + public void writeTo(StreamOutput out) throws IOException { + out.writeString(typeName); } public static DataType readFrom(StreamInput in) throws IOException { + // TODO: Use our normal enum serialization pattern String name = in.readString(); - if (name.equalsIgnoreCase(DataTypes.DOC_DATA_TYPE.name())) { - return DataTypes.DOC_DATA_TYPE; + if (name.equalsIgnoreCase(DataType.DOC_DATA_TYPE.nameUpper())) { + return DataType.DOC_DATA_TYPE; } - DataType dataType = DataTypes.fromTypeName(name); + DataType dataType = DataType.fromTypeName(name); if (dataType == null) { throw new IOException("Unknown DataType for type name: " + name); } return dataType; } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(typeName); - } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java index 03466655ccc32..bb53472d06e71 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java @@ -22,23 +22,23 @@ import java.util.function.DoubleFunction; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BYTE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SHORT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isPrimitive; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isString; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.BYTE; +import static 
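The writeTo/readFrom pair above serializes the enum by its type name. A minimal round-trip sketch, using BytesStreamOutput as the stock in-memory StreamOutput:

    BytesStreamOutput out = new BytesStreamOutput();
    DataType.GEO_POINT.writeTo(out);                               // writes "geo_point"
    DataType back = DataType.readFrom(out.bytes().streamInput());
    assert back == DataType.GEO_POINT;                             // enum identity survives the trip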
org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; +import static org.elasticsearch.xpack.esql.core.type.DataType.isPrimitive; +import static org.elasticsearch.xpack.esql.core.type.DataType.isString; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.UNSIGNED_LONG_MAX; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.inUnsignedLongRange; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.isUnsignedLong; @@ -470,7 +470,7 @@ public static boolean convertToBoolean(String val) { * Throws InvalidArgumentException if such conversion is not possible */ public static Object convert(Object value, DataType dataType) { - DataType detectedType = DataTypes.fromJava(value); + DataType detectedType = DataType.fromJava(value); if (detectedType == dataType || value == null) { return value; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypes.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypes.java deleted file mode 100644 index 8d4129d1abbcc..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypes.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.type; - -import org.elasticsearch.index.mapper.SourceFieldMapper; - -import java.math.BigInteger; -import java.time.ZonedDateTime; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.Locale; -import java.util.Map; -import java.util.stream.Stream; - -import static java.util.stream.Collectors.toMap; -import static java.util.stream.Collectors.toUnmodifiableMap; - -public final class DataTypes { - - // tag::noformat - public static final DataType UNSUPPORTED = new DataType("UNSUPPORTED", null, 0, false, false, false); - - public static final DataType NULL = new DataType("null", 0, false, false, false); - - public static final DataType BOOLEAN = new DataType("boolean", 1, false, false, false); - // integer numeric - public static final DataType BYTE = new DataType("byte", Byte.BYTES, true, false, true); - public static final DataType SHORT = new DataType("short", Short.BYTES, true, false, true); - public static final DataType INTEGER = new DataType("integer", Integer.BYTES, true, false, true); - public static final DataType LONG = new DataType("long", Long.BYTES, true, false, true); - public static final DataType UNSIGNED_LONG = new DataType("unsigned_long", Long.BYTES, true, false, true); - // decimal numeric - public static final DataType DOUBLE = new DataType("double", Double.BYTES, false, true, true); - public static final DataType FLOAT = new DataType("float", Float.BYTES, false, true, true); - public static final DataType HALF_FLOAT = new DataType("half_float", Float.BYTES, false, true, true); - public static final DataType SCALED_FLOAT = new DataType("scaled_float", Long.BYTES, false, true, true); - // string - public static final DataType KEYWORD = new DataType("keyword", Integer.MAX_VALUE, false, false, true); - public static final DataType TEXT = new DataType("text", Integer.MAX_VALUE, false, false, false); - // date - public static final DataType DATETIME = new DataType("DATETIME", "date", Long.BYTES, false, false, true); - // ip - public static final DataType IP = new DataType("ip", 45, false, false, true); - // version - public static final DataType VERSION = new DataType("version", Integer.MAX_VALUE, false, false, true); - // complex types - public static final DataType OBJECT = new DataType("object", 0, false, false, false); - public static final DataType NESTED = new DataType("nested", 0, false, false, false); - //end::noformat - public static final DataType SOURCE = new DataType( - SourceFieldMapper.NAME, - SourceFieldMapper.NAME, - Integer.MAX_VALUE, - false, - false, - false - ); - public static final DataType DATE_PERIOD = new DataType("DATE_PERIOD", null, 3 * Integer.BYTES, false, false, false); - public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); - public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, true); - public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, true); - public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, true); - public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, true); - - /** - * These are numeric fields labeled as metric counters in time-series indices. 
Although stored - * internally as numeric fields, they represent cumulative metrics and must not be treated as regular - * numeric fields. Therefore, we define them differently and separately from their parent numeric field. - * These fields are strictly for use in retrieval from indices, rate aggregation, and casting to their - * parent numeric type. - */ - public static final DataType COUNTER_LONG = new DataType("counter_long", Long.BYTES, false, false, true); - public static final DataType COUNTER_INTEGER = new DataType("counter_integer", Integer.BYTES, false, false, true); - public static final DataType COUNTER_DOUBLE = new DataType("counter_double", Double.BYTES, false, false, true); - - public static final DataType DOC_DATA_TYPE = new DataType("_doc", Integer.BYTES * 3, false, false, false); - public static final DataType TSID_DATA_TYPE = new DataType("_tsid", Integer.MAX_VALUE, false, false, true); - - private static final Collection TYPES = Stream.of( - UNSUPPORTED, - NULL, - BOOLEAN, - BYTE, - SHORT, - INTEGER, - LONG, - UNSIGNED_LONG, - DOUBLE, - FLOAT, - HALF_FLOAT, - SCALED_FLOAT, - KEYWORD, - TEXT, - DATETIME, - IP, - VERSION, - OBJECT, - NESTED, - SOURCE, - DATE_PERIOD, - TIME_DURATION, - GEO_POINT, - CARTESIAN_POINT, - CARTESIAN_SHAPE, - GEO_SHAPE, - COUNTER_LONG, - COUNTER_INTEGER, - COUNTER_DOUBLE - ).sorted(Comparator.comparing(DataType::typeName)).toList(); - - private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); - - private static Map ES_TO_TYPE; - - static { - Map map = TYPES.stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); - map.put("date_nanos", DATETIME); - ES_TO_TYPE = Collections.unmodifiableMap(map); - } - - private DataTypes() {} - - public static Collection types() { - return TYPES; - } - - public static DataType fromTypeName(String name) { - return NAME_TO_TYPE.get(name.toLowerCase(Locale.ROOT)); - } - - public static DataType fromEs(String name) { - DataType type = ES_TO_TYPE.get(name); - return type != null ? 
type : UNSUPPORTED; - } - - public static DataType fromJava(Object value) { - if (value == null) { - return NULL; - } - if (value instanceof Integer) { - return INTEGER; - } - if (value instanceof Long) { - return LONG; - } - if (value instanceof BigInteger) { - return UNSIGNED_LONG; - } - if (value instanceof Boolean) { - return BOOLEAN; - } - if (value instanceof Double) { - return DOUBLE; - } - if (value instanceof Float) { - return FLOAT; - } - if (value instanceof Byte) { - return BYTE; - } - if (value instanceof Short) { - return SHORT; - } - if (value instanceof ZonedDateTime) { - return DATETIME; - } - if (value instanceof String || value instanceof Character) { - return KEYWORD; - } - - return null; - } - - public static boolean isUnsupported(DataType from) { - return from == UNSUPPORTED; - } - - public static boolean isString(DataType t) { - return t == KEYWORD || t == TEXT; - } - - public static boolean isPrimitive(DataType t) { - return t != OBJECT && t != NESTED && t != UNSUPPORTED; - } - - public static boolean isNull(DataType t) { - return t == NULL; - } - - public static boolean isNullOrNumeric(DataType t) { - return t.isNumeric() || isNull(t); - } - - public static boolean isSigned(DataType t) { - return t.isNumeric() && t.equals(UNSIGNED_LONG) == false; - } - - public static boolean isDateTime(DataType type) { - return type == DATETIME; - } - - public static boolean areCompatible(DataType left, DataType right) { - if (left == right) { - return true; - } else { - return (left == NULL || right == NULL) - || (isString(left) && isString(right)) - || (left.isNumeric() && right.isNumeric()) - || (isDateTime(left) && isDateTime(right)); - } - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java index 9d2341f77505e..01728954a2e1b 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DateEsField.java @@ -20,7 +20,7 @@ public class DateEsField extends EsField { static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(EsField.class, "DateEsField", DateEsField::new); public static DateEsField dateEsField(String name, Map properties, boolean hasDocValues) { - return new DateEsField(name, DataTypes.DATETIME, properties, hasDocValues); + return new DateEsField(name, DataType.DATETIME, properties, hasDocValues); } private DateEsField(String name, DataType dataType, Map properties, boolean hasDocValues) { @@ -28,7 +28,7 @@ private DateEsField(String name, DataType dataType, Map propert } private DateEsField(StreamInput in) throws IOException { - this(in.readString(), DataTypes.DATETIME, in.readMap(i -> i.readNamedWriteable(EsField.class)), in.readBoolean()); + this(in.readString(), DataType.DATETIME, in.readMap(i -> i.readNamedWriteable(EsField.class)), in.readBoolean()); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DefaultDataTypeRegistry.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DefaultDataTypeRegistry.java index cba409236e8c9..9bba9698faff7 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DefaultDataTypeRegistry.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DefaultDataTypeRegistry.java @@ -19,22 
+19,22 @@ private DefaultDataTypeRegistry() {} @Override public Collection dataTypes() { - return DataTypes.types(); + return DataType.types(); } @Override public DataType fromEs(String typeName, TimeSeriesParams.MetricType metricType) { - return DataTypes.fromEs(typeName); + return DataType.fromEs(typeName); } @Override public DataType fromJava(Object value) { - return DataTypes.fromJava(value); + return DataType.fromJava(value); } @Override public boolean isUnsupported(DataType type) { - return DataTypes.isUnsupported(type); + return DataType.isUnsupported(type); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java index 3f9a1415df70c..fd7bfbec4730f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/InvalidMappedField.java @@ -31,7 +31,7 @@ public class InvalidMappedField extends EsField { private final String errorMessage; public InvalidMappedField(String name, String errorMessage, Map properties) { - super(name, DataTypes.UNSUPPORTED, properties, false); + super(name, DataType.UNSUPPORTED, properties, false); this.errorMessage = errorMessage; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java index 975bd75f91f18..d856e3d9d8297 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/KeywordEsField.java @@ -15,7 +15,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; /** * Information about a field in an ES index with the {@code keyword} type. diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java index ba5295f1f9a8a..c52230fa65829 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/TextEsField.java @@ -16,8 +16,8 @@ import java.util.Map; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; /** * Information about a field in an es index with the {@code text} type. 
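After this change the field classes pin their type to enum constants directly. For instance, via the DateEsField factory shown above (the field name here is hypothetical):

    DateEsField ts = DateEsField.dateEsField("@timestamp", Map.of(), true);
    assert ts.getDataType() == DataType.DATETIME;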
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/Types.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/Types.java index a5a457207ba84..5daa2e0050543 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/Types.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/Types.java @@ -16,12 +16,12 @@ import java.util.Map.Entry; import static java.util.Collections.emptyMap; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NESTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.OBJECT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.NESTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; public abstract class Types { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java index af1d0d3659bc0..13e4d6ad953a8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/UnsupportedEsField.java @@ -34,7 +34,7 @@ public UnsupportedEsField(String name, String originalType) { } public UnsupportedEsField(String name, String originalType, String inherited, Map properties) { - super(name, DataTypes.UNSUPPORTED, properties, false); + super(name, DataType.UNSUPPORTED, properties, false); this.originalType = originalType; this.inherited = inherited; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index df8fac06dd478..485084bac60b3 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.core.util; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -33,4 +35,10 @@ public interface PlanStreamInput { * the same result. */ NameId mapNameId(long id) throws IOException; + + /** + * Read an {@link Expression} from the stream. This will soon be replaced with + * {@link StreamInput#readNamedWriteable}. 
+ */ + Expression readExpression() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java new file mode 100644 index 0000000000000..6a3d8fb77316c --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.util; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Expression; + +import java.io.IOException; + +/** + * Interface for streams that can serialize plan components. This exists so + * ESQL proper can expose streaming capability to ESQL-core. If the world is kind + * and just we'll remove this when we flatten everything from ESQL-core into + * ESQL proper. + */ +public interface PlanStreamOutput { + /** + * Write an {@link Expression} to the stream. This will soon be replaced with + * {@link StreamOutput#writeNamedWriteable}. + */ + void writeExpression(Expression expression) throws IOException; +} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzerTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzerTests.java deleted file mode 100644 index f48d766c0dedb..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/analyzer/PreAnalyzerTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
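The two interfaces are meant to be implemented by the same ESQL-proper stream classes; a hypothetical helper spells out the intended contract:

    // Hypothetical: any matched PlanStreamOutput/PlanStreamInput pair must round-trip.
    static Expression roundTrip(PlanStreamOutput out, PlanStreamInput in, Expression expr) throws IOException {
        out.writeExpression(expr);              // ESQL proper encodes the expression
        Expression copy = in.readExpression();  // and decodes it from the same bytes
        assert expr.semanticEquals(copy);
        return copy;
    }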
- */ -package org.elasticsearch.xpack.esql.core.analyzer; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.analyzer.PreAnalyzer.PreAnalysis; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnresolvedRelation; - -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; - -public class PreAnalyzerTests extends ESTestCase { - - private PreAnalyzer preAnalyzer = new PreAnalyzer(); - - public void testBasicIndex() { - LogicalPlan plan = new UnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "index"), null, false); - PreAnalysis result = preAnalyzer.preAnalyze(plan); - assertThat(plan.preAnalyzed(), is(true)); - assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).id().cluster(), nullValue()); - assertThat(result.indices.get(0).id().index(), is("index")); - } - - public void testBasicIndexWithCatalog() { - LogicalPlan plan = new UnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, "elastic", "index"), null, false); - PreAnalysis result = preAnalyzer.preAnalyze(plan); - assertThat(plan.preAnalyzed(), is(true)); - assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).id().cluster(), is("elastic")); - assertThat(result.indices.get(0).id().index(), is("index")); - } - - public void testComplicatedQuery() { - LogicalPlan plan = new Limit( - EMPTY, - new Literal(EMPTY, 10, INTEGER), - new UnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "aaa"), null, false) - ); - PreAnalysis result = preAnalyzer.preAnalyze(plan); - assertThat(plan.preAnalyzed(), is(true)); - assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).id().cluster(), nullValue()); - assertThat(result.indices.get(0).id().index(), is("aaa")); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/AttributeMapTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/AttributeMapTests.java index 7e8ba58285fd3..c077af4026974 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/AttributeMapTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/AttributeMapTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.Collection; import java.util.List; @@ -69,8 +69,8 @@ public void testAttributeMapWithSameAliasesCanResolveAttributes() { public void testResolve() { AttributeMap.Builder builder = AttributeMap.builder(); Attribute one = a("one"); - Attribute two = fieldAttribute("two", DataTypes.INTEGER); - Attribute three = fieldAttribute("three", DataTypes.INTEGER); + Attribute two = fieldAttribute("two", DataType.INTEGER); + Attribute three = fieldAttribute("three", DataType.INTEGER); Alias threeAlias = new Alias(Source.EMPTY, 
"three_alias", three); Alias threeAliasAlias = new Alias(Source.EMPTY, "three_alias_alias", threeAlias); builder.put(one, of("one")); @@ -94,8 +94,8 @@ public void testResolve() { public void testResolveOneHopCycle() { AttributeMap.Builder builder = AttributeMap.builder(); - Attribute a = fieldAttribute("a", DataTypes.INTEGER); - Attribute b = fieldAttribute("b", DataTypes.INTEGER); + Attribute a = fieldAttribute("a", DataType.INTEGER); + Attribute b = fieldAttribute("b", DataType.INTEGER); builder.put(a, a); builder.put(b, a); AttributeMap map = builder.build(); @@ -107,10 +107,10 @@ public void testResolveOneHopCycle() { public void testResolveMultiHopCycle() { AttributeMap.Builder builder = AttributeMap.builder(); - Attribute a = fieldAttribute("a", DataTypes.INTEGER); - Attribute b = fieldAttribute("b", DataTypes.INTEGER); - Attribute c = fieldAttribute("c", DataTypes.INTEGER); - Attribute d = fieldAttribute("d", DataTypes.INTEGER); + Attribute a = fieldAttribute("a", DataType.INTEGER); + Attribute b = fieldAttribute("b", DataType.INTEGER); + Attribute c = fieldAttribute("c", DataType.INTEGER); + Attribute d = fieldAttribute("d", DataType.INTEGER); builder.put(a, b); builder.put(b, c); builder.put(c, d); @@ -124,7 +124,7 @@ public void testResolveMultiHopCycle() { private Alias createIntParameterAlias(int index, int value) { Source source = new Source(1, index * 5, "?"); - Literal literal = new Literal(source, value, DataTypes.INTEGER); + Literal literal = new Literal(source, value, DataType.INTEGER); Alias alias = new Alias(literal.source(), literal.source().text(), literal); return alias; } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java index 32a953165b53c..7e57e8f358ae1 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java @@ -21,14 +21,14 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BYTE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SHORT; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.BYTE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; public class LiteralTests extends AbstractNodeTestCase { static class ValueAndCompatibleTypes { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java index 53132fe1c4b8a..fbeac1748ac81 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import static java.util.Arrays.asList; import static org.elasticsearch.xpack.esql.core.expression.Nullability.FALSE; @@ -36,7 +35,7 @@ public Nullability nullable() { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/FunctionTestUtils.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/FunctionTestUtils.java index eba95df9d6dd5..8f0ff30074b83 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/FunctionTestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/FunctionTestUtils.java @@ -10,7 +10,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.time.Instant; import java.time.ZonedDateTime; @@ -18,15 +17,15 @@ import java.util.Iterator; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public final class FunctionTestUtils { public static Literal l(Object value) { - return new Literal(EMPTY, value, DataTypes.fromJava(value)); + return new Literal(EMPTY, value, DataType.fromJava(value)); } public static Literal l(Object value, DataType type) { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java index f7e650b7727b7..6009ca774f8cd 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java @@ -17,15 +17,15 @@ import java.util.Arrays; import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.l; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static 
org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SHORT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; public class RangeTests extends ESTestCase { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java index 440e48b30536b..12dbb23a86c59 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java @@ -8,95 +8,24 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.TestUtils; -import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Add; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Div; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mod; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mul; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Sub; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThan; -import 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThanOrEqual; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NullEquals; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BinaryComparisonSimplification; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanFunctionEqualsElimination; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.CombineBinaryComparisons; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.ConstantFolding; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateEquals; -import org.elasticsearch.xpack.esql.core.plan.logical.EsRelation; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; -import org.elasticsearch.xpack.esql.core.util.StringUtils; -import java.time.ZoneId; import java.util.Collections; import java.util.List; -import static java.util.Arrays.asList; -import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.esql.core.TestUtils.equalsOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.fieldAttribute; -import static org.elasticsearch.xpack.esql.core.TestUtils.greaterThanOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.greaterThanOrEqualOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.lessThanOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.lessThanOrEqualOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.notEqualsOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.nullEqualsOf; import static org.elasticsearch.xpack.esql.core.TestUtils.of; import static org.elasticsearch.xpack.esql.core.TestUtils.rangeOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.relation; -import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; -import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; -import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; -import static org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.CombineDisjunctionsToIn; -import static 
org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable; -import static org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.ReplaceRegexMatch; -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.hamcrest.Matchers.contains; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; public class OptimizerRulesTests extends ESTestCase { - private static final Expression DUMMY_EXPRESSION = new DummyBooleanExpression(EMPTY, 0); - - private static final Literal ONE = L(1); - private static final Literal TWO = L(2); - private static final Literal THREE = L(3); - private static final Literal FOUR = L(4); private static final Literal FIVE = L(5); private static final Literal SIX = L(6); @@ -153,198 +82,6 @@ private static FieldAttribute getFieldAttribute() { return TestUtils.getFieldAttribute("a"); } - // - // Constant folding - // - - public void testConstantFolding() { - Expression exp = new Add(EMPTY, TWO, THREE); - - assertTrue(exp.foldable()); - Expression result = new ConstantFolding().rule(exp); - assertTrue(result instanceof Literal); - assertEquals(5, ((Literal) result).value()); - - // check now with an alias - result = new ConstantFolding().rule(new Alias(EMPTY, "a", exp)); - assertEquals("a", Expressions.name(result)); - assertEquals(Alias.class, result.getClass()); - } - - public void testConstantFoldingBinaryComparison() { - assertEquals(FALSE, new ConstantFolding().rule(greaterThanOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(greaterThanOrEqualOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(equalsOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, THREE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, NULL)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(notEqualsOf(TWO, THREE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(lessThanOrEqualOf(TWO, THREE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(lessThanOf(TWO, THREE)).canonical()); - } - - public void testConstantFoldingBinaryLogic() { - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, greaterThanOf(TWO, THREE), TRUE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, greaterThanOrEqualOf(TWO, THREE), TRUE)).canonical()); - } - - public void testConstantFoldingBinaryLogic_WithNullHandling() { - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, TRUE)).canonical().nullable()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, TRUE, NULL)).canonical().nullable()); - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, NULL, FALSE)).canonical()); - assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, FALSE, NULL)).canonical()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, NULL)).canonical().nullable()); - - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, TRUE)).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, TRUE, NULL)).canonical()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new 
Or(EMPTY, NULL, FALSE)).canonical().nullable()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, FALSE, NULL)).canonical().nullable()); - assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, NULL)).canonical().nullable()); - } - - public void testConstantFoldingRange() { - assertEquals(true, new ConstantFolding().rule(rangeOf(FIVE, FIVE, true, L(10), false)).fold()); - assertEquals(false, new ConstantFolding().rule(rangeOf(FIVE, FIVE, false, L(10), false)).fold()); - } - - public void testConstantNot() { - assertEquals(FALSE, new ConstantFolding().rule(new Not(EMPTY, TRUE))); - assertEquals(TRUE, new ConstantFolding().rule(new Not(EMPTY, FALSE))); - } - - public void testConstantFoldingLikes() { - assertEquals(TRUE, new ConstantFolding().rule(new Like(EMPTY, of("test_emp"), new LikePattern("test%", (char) 0))).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); - assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); - } - - public void testArithmeticFolding() { - assertEquals(10, foldOperator(new Add(EMPTY, L(7), THREE))); - assertEquals(4, foldOperator(new Sub(EMPTY, L(7), THREE))); - assertEquals(21, foldOperator(new Mul(EMPTY, L(7), THREE))); - assertEquals(2, foldOperator(new Div(EMPTY, L(7), THREE))); - assertEquals(1, foldOperator(new Mod(EMPTY, L(7), THREE))); - } - - private static Object foldOperator(BinaryOperator b) { - return ((Literal) new ConstantFolding().rule(b)).value(); - } - - // - // Logical simplifications - // - - public void testLiteralsOnTheRight() { - Alias a = new Alias(EMPTY, "a", L(10)); - Expression result = new LiteralsOnTheRight().rule(equalsOf(FIVE, a)); - assertTrue(result instanceof Equals); - Equals eq = (Equals) result; - assertEquals(a, eq.left()); - assertEquals(FIVE, eq.right()); - - a = new Alias(EMPTY, "a", L(10)); - result = new LiteralsOnTheRight().rule(nullEqualsOf(FIVE, a)); - assertTrue(result instanceof NullEquals); - NullEquals nullEquals = (NullEquals) result; - assertEquals(a, nullEquals.left()); - assertEquals(FIVE, nullEquals.right()); - } - - public void testBoolSimplifyOr() { - BooleanSimplification simplification = new BooleanSimplification(); - - assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, TRUE))); - assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, DUMMY_EXPRESSION))); - assertEquals(TRUE, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, TRUE))); - - assertEquals(FALSE, simplification.rule(new Or(EMPTY, FALSE, FALSE))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, FALSE, DUMMY_EXPRESSION))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, FALSE))); - } - - public void testBoolSimplifyAnd() { - BooleanSimplification simplification = new BooleanSimplification(); - - assertEquals(TRUE, simplification.rule(new And(EMPTY, TRUE, TRUE))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, TRUE, DUMMY_EXPRESSION))); - assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, TRUE))); - - assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, FALSE))); - assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, DUMMY_EXPRESSION))); - assertEquals(FALSE, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, FALSE))); - } - - public void testBoolCommonFactorExtraction() { - BooleanSimplification 
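The testBoolCommonFactorExtraction body that continues below exercises the distributive rewrite; in miniature, with a1, a2, b, c as in the test:

    // (a AND b) OR (a AND c)  ==>  a AND (b OR c)
    Expression folded = new BooleanSimplification().rule(
        new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c))
    );
    // folded equals new And(EMPTY, a1, new Or(EMPTY, b, c)): the common factor is
    // kept once, which is why the test builds two equal-but-distinct instances a1 and a2.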
simplification = new BooleanSimplification(); - - Expression a1 = new DummyBooleanExpression(EMPTY, 1); - Expression a2 = new DummyBooleanExpression(EMPTY, 1); - Expression b = new DummyBooleanExpression(EMPTY, 2); - Expression c = new DummyBooleanExpression(EMPTY, 3); - - Or actual = new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c)); - And expected = new And(EMPTY, a1, new Or(EMPTY, b, c)); - - assertEquals(expected, simplification.rule(actual)); - } - - public void testBinaryComparisonSimplification() { - assertEquals(TRUE, new BinaryComparisonSimplification().rule(equalsOf(FIVE, FIVE))); - assertEquals(TRUE, new BinaryComparisonSimplification().rule(nullEqualsOf(FIVE, FIVE))); - assertEquals(TRUE, new BinaryComparisonSimplification().rule(nullEqualsOf(NULL, NULL))); - assertEquals(FALSE, new BinaryComparisonSimplification().rule(notEqualsOf(FIVE, FIVE))); - assertEquals(TRUE, new BinaryComparisonSimplification().rule(greaterThanOrEqualOf(FIVE, FIVE))); - assertEquals(TRUE, new BinaryComparisonSimplification().rule(lessThanOrEqualOf(FIVE, FIVE))); - - assertEquals(FALSE, new BinaryComparisonSimplification().rule(greaterThanOf(FIVE, FIVE))); - assertEquals(FALSE, new BinaryComparisonSimplification().rule(lessThanOf(FIVE, FIVE))); - } - - public void testNullEqualsWithNullLiteralBecomesIsNull() { - LiteralsOnTheRight swapLiteralsToRight = new LiteralsOnTheRight(); - BinaryComparisonSimplification bcSimpl = new BinaryComparisonSimplification(); - FieldAttribute fa = getFieldAttribute(); - Source source = new Source(1, 10, "IS_NULL(a)"); - - Expression e = bcSimpl.rule((BinaryComparison) swapLiteralsToRight.rule(new NullEquals(source, fa, NULL, randomZone()))); - assertEquals(IsNull.class, e.getClass()); - IsNull isNull = (IsNull) e; - assertEquals(source, isNull.source()); - - e = bcSimpl.rule((BinaryComparison) swapLiteralsToRight.rule(new NullEquals(source, NULL, fa, randomZone()))); - assertEquals(IsNull.class, e.getClass()); - isNull = (IsNull) e; - assertEquals(source, isNull.source()); - } - - public void testBoolEqualsSimplificationOnExpressions() { - BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); - Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), L(0), null); - - assertEquals(exp, s.rule(new Equals(EMPTY, exp, TRUE))); - assertEquals(new Not(EMPTY, exp), s.rule(new Equals(EMPTY, exp, FALSE))); - } - - public void testBoolEqualsSimplificationOnFields() { - BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); - - FieldAttribute field = getFieldAttribute(); - - List comparisons = asList( - new Equals(EMPTY, field, TRUE), - new Equals(EMPTY, field, FALSE), - notEqualsOf(field, TRUE), - notEqualsOf(field, FALSE), - new Equals(EMPTY, NULL, TRUE), - new Equals(EMPTY, NULL, FALSE), - notEqualsOf(NULL, TRUE), - notEqualsOf(NULL, FALSE) - ); - - for (BinaryComparison comparison : comparisons) { - assertEquals(comparison, s.rule(comparison)); - } - } - // // Range optimization // @@ -369,1399 +106,4 @@ public void testFoldExcludingRangeWithDifferentTypesToFalse() { // Conjunction - public void testCombineBinaryComparisonsNotComparable() { - FieldAttribute fa = getFieldAttribute(); - LessThanOrEqual lte = lessThanOrEqualOf(fa, SIX); - LessThan lt = lessThanOf(fa, FALSE); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - And and = new And(EMPTY, lte, lt); - Expression exp = rule.rule(and); - assertEquals(exp, and); - } - - // a <= 6 AND a < 5 -> a < 5 - public void testCombineBinaryComparisonsUpper() 
{ - FieldAttribute fa = getFieldAttribute(); - LessThanOrEqual lte = lessThanOrEqualOf(fa, SIX); - LessThan lt = lessThanOf(fa, FIVE); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, lte, lt)); - assertEquals(LessThan.class, exp.getClass()); - LessThan r = (LessThan) exp; - assertEquals(FIVE, r.right()); - } - - // 6 <= a AND 5 < a -> 6 <= a - public void testCombineBinaryComparisonsLower() { - FieldAttribute fa = getFieldAttribute(); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, SIX); - GreaterThan gt = greaterThanOf(fa, FIVE); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, gte, gt)); - assertEquals(GreaterThanOrEqual.class, exp.getClass()); - GreaterThanOrEqual r = (GreaterThanOrEqual) exp; - assertEquals(SIX, r.right()); - } - - // 5 <= a AND 5 < a -> 5 < a - public void testCombineBinaryComparisonsInclude() { - FieldAttribute fa = getFieldAttribute(); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, FIVE); - GreaterThan gt = greaterThanOf(fa, FIVE); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new And(EMPTY, gte, gt)); - assertEquals(GreaterThan.class, exp.getClass()); - GreaterThan r = (GreaterThan) exp; - assertEquals(FIVE, r.right()); - } - - // 2 < a AND (2 <= a < 3) -> 2 < a < 3 - public void testCombineBinaryComparisonsAndRangeLower() { - FieldAttribute fa = getFieldAttribute(); - - GreaterThan gt = greaterThanOf(fa, TWO); - Range range = rangeOf(fa, TWO, true, THREE, false); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(new And(EMPTY, gt, range)); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a < 4 AND (1 < a < 3) -> 1 < a < 3 - public void testCombineBinaryComparisonsAndRangeUpper() { - FieldAttribute fa = getFieldAttribute(); - - LessThan lt = lessThanOf(fa, FOUR); - Range range = rangeOf(fa, ONE, false, THREE, false); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(new And(EMPTY, range, lt)); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(ONE, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a <= 2 AND (1 < a < 3) -> 1 < a <= 2 - public void testCombineBinaryComparisonsAndRangeUpperEqual() { - FieldAttribute fa = getFieldAttribute(); - - LessThanOrEqual lte = lessThanOrEqualOf(fa, TWO); - Range range = rangeOf(fa, ONE, false, THREE, false); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(new And(EMPTY, lte, range)); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(ONE, r.lower()); - assertFalse(r.includeLower()); - assertEquals(TWO, r.upper()); - assertTrue(r.includeUpper()); - } - - // 3 <= a AND 4 < a AND a <= 7 AND a < 6 -> 4 < a < 6 - public void testCombineMultipleBinaryComparisons() { - FieldAttribute fa = getFieldAttribute(); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, THREE); - GreaterThan gt = greaterThanOf(fa, FOUR); - LessThanOrEqual lte = lessThanOrEqualOf(fa, L(7)); - LessThan lt = lessThanOf(fa, SIX); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - Expression exp = rule.rule(new 
And(EMPTY, gte, new And(EMPTY, gt, new And(EMPTY, lt, lte)))); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(FOUR, r.lower()); - assertFalse(r.includeLower()); - assertEquals(SIX, r.upper()); - assertFalse(r.includeUpper()); - } - - // 3 <= a AND TRUE AND 4 < a AND a != 5 AND a <= 7 -> 4 < a <= 7 AND a != 5 AND TRUE - public void testCombineMixedMultipleBinaryComparisons() { - FieldAttribute fa = getFieldAttribute(); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, THREE); - GreaterThan gt = greaterThanOf(fa, FOUR); - LessThanOrEqual lte = lessThanOrEqualOf(fa, L(7)); - Expression ne = new Not(EMPTY, equalsOf(fa, FIVE)); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - - // TRUE AND a != 5 AND 4 < a <= 7 - Expression exp = rule.rule(new And(EMPTY, gte, new And(EMPTY, TRUE, new And(EMPTY, gt, new And(EMPTY, ne, lte))))); - assertEquals(And.class, exp.getClass()); - And and = ((And) exp); - assertEquals(Range.class, and.right().getClass()); - Range r = (Range) and.right(); - assertEquals(FOUR, r.lower()); - assertFalse(r.includeLower()); - assertEquals(L(7), r.upper()); - assertTrue(r.includeUpper()); - } - - // 1 <= a AND a < 5 -> 1 <= a < 5 - public void testCombineComparisonsIntoRange() { - FieldAttribute fa = getFieldAttribute(); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, ONE); - LessThan lt = lessThanOf(fa, FIVE); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(new And(EMPTY, gte, lt)); - assertEquals(Range.class, exp.getClass()); - - Range r = (Range) exp; - assertEquals(ONE, r.lower()); - assertTrue(r.includeLower()); - assertEquals(FIVE, r.upper()); - assertFalse(r.includeUpper()); - } - - // 1 < a AND a < 3 AND 2 < b AND b < 4 AND c < 4 -> (1 < a < 3) AND (2 < b < 4) AND c < 4 - public void testCombineMultipleComparisonsIntoRange() { - FieldAttribute fa = TestUtils.getFieldAttribute("a"); - FieldAttribute fb = TestUtils.getFieldAttribute("b"); - FieldAttribute fc = TestUtils.getFieldAttribute("c"); - - ZoneId zoneId = randomZone(); - GreaterThan agt1 = new GreaterThan(EMPTY, fa, ONE, zoneId); - LessThan alt3 = new LessThan(EMPTY, fa, THREE, zoneId); - GreaterThan bgt2 = new GreaterThan(EMPTY, fb, TWO, zoneId); - LessThan blt4 = new LessThan(EMPTY, fb, FOUR, zoneId); - LessThan clt4 = new LessThan(EMPTY, fc, FOUR, zoneId); - - Expression inputAnd = Predicates.combineAnd(asList(agt1, alt3, bgt2, blt4, clt4)); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression outputAnd = rule.rule((And) inputAnd); - - Range agt1lt3 = new Range(EMPTY, fa, ONE, false, THREE, false, zoneId); - Range bgt2lt4 = new Range(EMPTY, fb, TWO, false, FOUR, false, zoneId); - - // The actual outcome is (c < 4) AND (1 < a < 3) AND (2 < b < 4), due to the way the Expression types are combined in the Optimizer - Expression expectedAnd = Predicates.combineAnd(asList(clt4, agt1lt3, bgt2lt4)); - - assertTrue(outputAnd.semanticEquals(expectedAnd)); - } - - // (2 < a < 3) AND (1 < a < 4) -> (2 < a < 3) - public void testCombineBinaryComparisonsConjunctionOfIncludedRange() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, ONE, false, FOUR, false); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(r1, exp); - } - - // (2 < a < 3) AND a < 2 -> 2 < a < 2 - public void 
testCombineBinaryComparisonsConjunctionOfNonOverlappingBoundaries() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, ONE, false, TWO, false); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(TWO, r.upper()); - assertFalse(r.includeUpper()); - assertEquals(Boolean.FALSE, r.fold()); - } - - // (2 < a < 3) AND (2 < a <= 3) -> 2 < a < 3 - public void testCombineBinaryComparisonsConjunctionOfUpperEqualsOverlappingBoundaries() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, TWO, false, THREE, true); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(r1, exp); - } - - // (2 < a < 3) AND (1 < a < 3) -> 2 < a < 3 - public void testCombineBinaryComparisonsConjunctionOverlappingUpperBoundary() { - FieldAttribute fa = getFieldAttribute(); - - Range r2 = rangeOf(fa, TWO, false, THREE, false); - Range r1 = rangeOf(fa, ONE, false, THREE, false); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(r2, exp); - } - - // (2 < a <= 3) AND (1 < a < 3) -> 2 < a < 3 - public void testCombineBinaryComparisonsConjunctionWithDifferentUpperLimitInclusion() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, ONE, false, THREE, false); - Range r2 = rangeOf(fa, TWO, false, THREE, true); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // (0 < a <= 1) AND (0 <= a < 2) -> 0 < a <= 1 - public void testRangesOverlappingConjunctionNoLowerBoundary() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, L(0), false, ONE, true); - Range r2 = rangeOf(fa, L(0), true, TWO, false); - - And and = new And(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(r1, exp); - } - - // a != 2 AND 3 < a < 5 -> 3 < a < 5 - public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt3Lt5() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - Range range = rangeOf(fa, THREE, false, FIVE, false); - And and = new And(EMPTY, range, neq); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(THREE, r.lower()); - assertFalse(r.includeLower()); - assertEquals(FIVE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a != 2 AND 0 < a < 1 -> 0 < a < 1 - public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt0Lt1() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - Range range = rangeOf(fa, L(0), false, ONE, false); - And and = new And(EMPTY, neq, range); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - 
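// 2 lies outside the open interval (0, 1), so the not-equals adds nothing; the rule should keep only the range.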
Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(L(0), r.lower()); - assertFalse(r.includeLower()); - assertEquals(ONE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a != 2 AND 2 <= a < 3 -> 2 < a < 3 - public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGte2Lt3() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - Range range = rangeOf(fa, TWO, true, THREE, false); - And and = new And(EMPTY, neq, range); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a != 3 AND 2 < a <= 3 -> 2 < a < 3 - public void testCombineBinaryComparisonsConjunction_Neq3AndRangeGt2Lte3() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, THREE); - Range range = rangeOf(fa, TWO, false, THREE, true); - And and = new And(EMPTY, neq, range); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a != 2 AND 1 < a < 3 - public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt1Lt3() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - Range range = rangeOf(fa, ONE, false, THREE, false); - And and = new And(EMPTY, neq, range); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(And.class, exp.getClass()); // can't optimize - } - - // a != 2 AND a > 3 -> a > 3 - public void testCombineBinaryComparisonsConjunction_Neq2AndGt3() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - GreaterThan gt = greaterThanOf(fa, THREE); - And and = new And(EMPTY, neq, gt); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(gt, exp); - } - - // a != 2 AND a >= 2 -> a > 2 - public void testCombineBinaryComparisonsConjunction_Neq2AndGte2() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); - And and = new And(EMPTY, neq, gte); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(GreaterThan.class, exp.getClass()); - GreaterThan gt = (GreaterThan) exp; - assertEquals(TWO, gt.right()); - } - - // a != 2 AND a >= 1 -> nop - public void testCombineBinaryComparisonsConjunction_Neq2AndGte1() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, ONE); - And and = new And(EMPTY, neq, gte); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(And.class, exp.getClass()); // can't optimize - } - - // a != 2 AND a <= 3 -> nop - public void testCombineBinaryComparisonsConjunction_Neq2AndLte3() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - LessThanOrEqual lte = lessThanOrEqualOf(fa, THREE); - And and = new And(EMPTY, neq, lte); 
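// 2 falls inside (-inf, 3], so neither predicate subsumes the other; the rule should return the And unchanged.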
- - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(and, exp); // can't optimize - } - - // a != 2 AND a <= 2 -> a < 2 - public void testCombineBinaryComparisonsConjunction_Neq2AndLte2() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - LessThanOrEqual lte = lessThanOrEqualOf(fa, TWO); - And and = new And(EMPTY, neq, lte); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(LessThan.class, exp.getClass()); - LessThan lt = (LessThan) exp; - assertEquals(TWO, lt.right()); - } - - // a != 2 AND a <= 1 -> a <= 1 - public void testCombineBinaryComparisonsConjunction_Neq2AndLte1() { - FieldAttribute fa = getFieldAttribute(); - - NotEquals neq = notEqualsOf(fa, TWO); - LessThanOrEqual lte = lessThanOrEqualOf(fa, ONE); - And and = new And(EMPTY, neq, lte); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals(lte, exp); - } - - // Disjunction - - public void testCombineBinaryComparisonsDisjunctionNotComparable() { - FieldAttribute fa = getFieldAttribute(); - - GreaterThan gt1 = greaterThanOf(fa, ONE); - GreaterThan gt2 = greaterThanOf(fa, FALSE); - - Or or = new Or(EMPTY, gt1, gt2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(exp, or); - } - - // 2 < a OR 1 < a OR 3 < a -> 1 < a - public void testCombineBinaryComparisonsDisjunctionLowerBound() { - FieldAttribute fa = getFieldAttribute(); - - GreaterThan gt1 = greaterThanOf(fa, ONE); - GreaterThan gt2 = greaterThanOf(fa, TWO); - GreaterThan gt3 = greaterThanOf(fa, THREE); - - Or or = new Or(EMPTY, gt1, new Or(EMPTY, gt2, gt3)); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(GreaterThan.class, exp.getClass()); - - GreaterThan gt = (GreaterThan) exp; - assertEquals(ONE, gt.right()); - } - - // 2 < a OR 1 < a OR 3 <= a -> 1 < a - public void testCombineBinaryComparisonsDisjunctionIncludeLowerBounds() { - FieldAttribute fa = getFieldAttribute(); - - GreaterThan gt1 = greaterThanOf(fa, ONE); - GreaterThan gt2 = greaterThanOf(fa, TWO); - GreaterThanOrEqual gte3 = greaterThanOrEqualOf(fa, THREE); - - Or or = new Or(EMPTY, new Or(EMPTY, gt1, gt2), gte3); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(GreaterThan.class, exp.getClass()); - - GreaterThan gt = (GreaterThan) exp; - assertEquals(ONE, gt.right()); - } - - // a < 1 OR a < 2 OR a < 3 -> a < 3 - public void testCombineBinaryComparisonsDisjunctionUpperBound() { - FieldAttribute fa = getFieldAttribute(); - - LessThan lt1 = lessThanOf(fa, ONE); - LessThan lt2 = lessThanOf(fa, TWO); - LessThan lt3 = lessThanOf(fa, THREE); - - Or or = new Or(EMPTY, new Or(EMPTY, lt1, lt2), lt3); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(LessThan.class, exp.getClass()); - - LessThan lt = (LessThan) exp; - assertEquals(THREE, lt.right()); - } - - // a < 2 OR a <= 2 OR a < 1 -> a <= 2 - public void testCombineBinaryComparisonsDisjunctionIncludeUpperBounds() { - FieldAttribute fa = getFieldAttribute(); - - LessThan lt1 = lessThanOf(fa, ONE); - LessThan lt2 = lessThanOf(fa, TWO); - LessThanOrEqual lte2 = lessThanOrEqualOf(fa, TWO); - - Or or = new Or(EMPTY, lt2, new Or(EMPTY, lte2, lt1)); - - 
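// In a disjunction the widest upper bound wins: a <= 2 already covers both a < 2 and a < 1.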
CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(LessThanOrEqual.class, exp.getClass()); - - LessThanOrEqual lte = (LessThanOrEqual) exp; - assertEquals(TWO, lte.right()); - } - - // a < 2 OR 3 < a OR a < 1 OR 4 < a -> a < 2 OR 3 < a - public void testCombineBinaryComparisonsDisjunctionOfLowerAndUpperBounds() { - FieldAttribute fa = getFieldAttribute(); - - LessThan lt1 = lessThanOf(fa, ONE); - LessThan lt2 = lessThanOf(fa, TWO); - - GreaterThan gt3 = greaterThanOf(fa, THREE); - GreaterThan gt4 = greaterThanOf(fa, FOUR); - - Or or = new Or(EMPTY, new Or(EMPTY, lt2, gt3), new Or(EMPTY, lt1, gt4)); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(Or.class, exp.getClass()); - - Or ro = (Or) exp; - - assertEquals(LessThan.class, ro.left().getClass()); - LessThan lt = (LessThan) ro.left(); - assertEquals(TWO, lt.right()); - assertEquals(GreaterThan.class, ro.right().getClass()); - GreaterThan gt = (GreaterThan) ro.right(); - assertEquals(THREE, gt.right()); - } - - // (2 < a < 3) OR (1 < a < 4) -> (1 < a < 4) - public void testCombineBinaryComparisonsDisjunctionOfIncludedRangeNotComparable() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, ONE, false, FALSE, false); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(or, exp); - } - - // (2 < a < 3) OR (1 < a < 4) -> (1 < a < 4) - public void testCombineBinaryComparisonsDisjunctionOfIncludedRange() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, ONE, false, FOUR, false); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(Range.class, exp.getClass()); - - Range r = (Range) exp; - assertEquals(ONE, r.lower()); - assertFalse(r.includeLower()); - assertEquals(FOUR, r.upper()); - assertFalse(r.includeUpper()); - } - - // (2 < a < 3) OR (1 < a < 2) -> same - public void testCombineBinaryComparisonsDisjunctionOfNonOverlappingBoundaries() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, ONE, false, TWO, false); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(or, exp); - } - - // (2 < a < 3) OR (2 < a <= 3) -> 2 < a <= 3 - public void testCombineBinaryComparisonsDisjunctionOfUpperEqualsOverlappingBoundaries() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, TWO, false, THREE, false); - Range r2 = rangeOf(fa, TWO, false, THREE, true); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(r2, exp); - } - - // (2 < a < 3) OR (1 < a < 3) -> 1 < a < 3 - public void testCombineBinaryComparisonsOverlappingUpperBoundary() { - FieldAttribute fa = getFieldAttribute(); - - Range r2 = rangeOf(fa, TWO, false, THREE, false); - Range r1 = rangeOf(fa, ONE, false, THREE, false); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(r1, exp); - } - - // (2 < a <= 3) OR (1 < a < 3) -> same (the <= prevents 
the ranges from being combined) - public void testCombineBinaryComparisonsWithDifferentUpperLimitInclusion() { - FieldAttribute fa = getFieldAttribute(); - - Range r1 = rangeOf(fa, ONE, false, THREE, false); - Range r2 = rangeOf(fa, TWO, false, THREE, true); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(or, exp); - } - - // (a = 1 AND b = 3 AND c = 4) OR (a = 2 AND b = 3 AND c = 4) -> (b = 3 AND c = 4) AND (a = 1 OR a = 2) - public void testBooleanSimplificationCommonExpressionSubstraction() { - FieldAttribute fa = TestUtils.getFieldAttribute("a"); - FieldAttribute fb = TestUtils.getFieldAttribute("b"); - FieldAttribute fc = TestUtils.getFieldAttribute("c"); - - Expression a1 = equalsOf(fa, ONE); - Expression a2 = equalsOf(fa, TWO); - And common = new And(EMPTY, equalsOf(fb, THREE), equalsOf(fc, FOUR)); - And left = new And(EMPTY, a1, common); - And right = new And(EMPTY, a2, common); - Or or = new Or(EMPTY, left, right); - - Expression exp = new BooleanSimplification().rule(or); - assertEquals(new And(EMPTY, common, new Or(EMPTY, a1, a2)), exp); - } - - // (0 < a <= 1) OR (0 < a < 2) -> 0 < a < 2 - public void testRangesOverlappingNoLowerBoundary() { - FieldAttribute fa = getFieldAttribute(); - - Range r2 = rangeOf(fa, L(0), false, TWO, false); - Range r1 = rangeOf(fa, L(0), false, ONE, true); - - Or or = new Or(EMPTY, r1, r2); - - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(or); - assertEquals(r2, exp); - } - - public void testBinaryComparisonAndOutOfRangeNotEqualsDifferentFields() { - FieldAttribute doubleOne = fieldAttribute("double", DOUBLE); - FieldAttribute doubleTwo = fieldAttribute("double2", DOUBLE); - FieldAttribute intOne = fieldAttribute("int", INTEGER); - FieldAttribute datetimeOne = fieldAttribute("datetime", INTEGER); - FieldAttribute keywordOne = fieldAttribute("keyword", KEYWORD); - FieldAttribute keywordTwo = fieldAttribute("keyword2", KEYWORD); - - List testCases = asList( - // double > 10 AND integer != -10 - new And(EMPTY, greaterThanOf(doubleOne, L(10)), notEqualsOf(intOne, L(-10))), - // keyword > '5' AND keyword2 != '48' - new And(EMPTY, greaterThanOf(keywordOne, L("5")), notEqualsOf(keywordTwo, L("48"))), - // keyword != '2021' AND datetime <= '2020-12-04T17:48:22.954240Z' - new And(EMPTY, notEqualsOf(keywordOne, L("2021")), lessThanOrEqualOf(datetimeOne, L("2020-12-04T17:48:22.954240Z"))), - // double > 10.1 AND double2 != -10.1 - new And(EMPTY, greaterThanOf(doubleOne, L(10.1d)), notEqualsOf(doubleTwo, L(-10.1d))) - ); - - for (And and : testCases) { - CombineBinaryComparisons rule = new CombineBinaryComparisons(); - Expression exp = rule.rule(and); - assertEquals("Rule should not have transformed [" + and.nodeString() + "]", and, exp); - } - } - - // Equals & NullEquals - - // 1 <= a < 10 AND a == 1 -> a == 1 - public void testEliminateRangeByEqualsInInterval() { - FieldAttribute fa = getFieldAttribute(); - Equals eq1 = equalsOf(fa, ONE); - Range r = rangeOf(fa, ONE, true, L(10), false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, r)); - assertEquals(eq1, exp); - } - - // 1 <= a < 10 AND a <=> 1 -> a <=> 1 - public void testEliminateRangeByNullEqualsInInterval() { - FieldAttribute fa = getFieldAttribute(); - NullEquals eq1 = nullEqualsOf(fa, ONE); - Range r = rangeOf(fa, ONE, true, L(10), false); - - PropagateEquals rule = new PropagateEquals(); - 
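// The null-safe equality pins the value to 1, which sits inside [1, 10), so the range is expected to be dropped.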
Expression exp = rule.rule(new And(EMPTY, eq1, r)); - assertEquals(eq1, exp); - } - - // The following tests should work only to simplify filters and - // not if the expressions are part of a projection - // See: https://github.com/elastic/elasticsearch/issues/35859 - - // a == 1 AND a == 2 -> FALSE - public void testDualEqualsConjunction() { - FieldAttribute fa = getFieldAttribute(); - Equals eq1 = equalsOf(fa, ONE); - Equals eq2 = equalsOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); - assertEquals(FALSE, exp); - } - - // a <=> 1 AND a <=> 2 -> FALSE - public void testDualNullEqualsConjunction() { - FieldAttribute fa = getFieldAttribute(); - NullEquals eq1 = nullEqualsOf(fa, ONE); - NullEquals eq2 = nullEqualsOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); - assertEquals(FALSE, exp); - } - - // 1 < a < 10 AND a == 10 -> FALSE - public void testEliminateRangeByEqualsOutsideInterval() { - FieldAttribute fa = getFieldAttribute(); - Equals eq1 = equalsOf(fa, L(10)); - Range r = rangeOf(fa, ONE, false, L(10), false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, r)); - assertEquals(FALSE, exp); - } - - // 1 < a < 10 AND a <=> 10 -> FALSE - public void testEliminateRangeByNullEqualsOutsideInterval() { - FieldAttribute fa = getFieldAttribute(); - NullEquals eq1 = nullEqualsOf(fa, L(10)); - Range r = rangeOf(fa, ONE, false, L(10), false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq1, r)); - assertEquals(FALSE, exp); - } - - // a != 3 AND a = 3 -> FALSE - public void testPropagateEquals_VarNeq3AndVarEq3() { - FieldAttribute fa = getFieldAttribute(); - NotEquals neq = notEqualsOf(fa, THREE); - Equals eq = equalsOf(fa, THREE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, neq, eq)); - assertEquals(FALSE, exp); - } - - // a != 4 AND a = 3 -> a = 3 - public void testPropagateEquals_VarNeq4AndVarEq3() { - FieldAttribute fa = getFieldAttribute(); - NotEquals neq = notEqualsOf(fa, FOUR); - Equals eq = equalsOf(fa, THREE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, neq, eq)); - assertEquals(Equals.class, exp.getClass()); - assertEquals(eq, exp); - } - - // a = 2 AND a < 2 -> FALSE - public void testPropagateEquals_VarEq2AndVarLt2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - LessThan lt = lessThanOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); - assertEquals(FALSE, exp); - } - - // a = 2 AND a <= 2 -> a = 2 - public void testPropagateEquals_VarEq2AndVarLte2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); - assertEquals(eq, exp); - } - - // a = 2 AND a <= 1 -> FALSE - public void testPropagateEquals_VarEq2AndVarLte1() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, lt)); - assertEquals(FALSE, exp); - } - - // a = 2 AND a > 2 -> FALSE - public void testPropagateEquals_VarEq2AndVarGt2() { - FieldAttribute 
fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - GreaterThan gt = greaterThanOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gt)); - assertEquals(FALSE, exp); - } - - // a = 2 AND a >= 2 -> a = 2 - public void testPropagateEquals_VarEq2AndVarGte2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gte)); - assertEquals(eq, exp); - } - - // a = 2 AND a > 3 -> FALSE - public void testPropagateEquals_VarEq2AndVarLt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - GreaterThan gt = greaterThanOf(fa, THREE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new And(EMPTY, eq, gt)); - assertEquals(FALSE, exp); - } - - // a = 2 AND a < 3 AND a > 1 AND a != 4 -> a = 2 - public void testPropagateEquals_VarEq2AndVarLt3AndVarGt1AndVarNeq4() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - LessThan lt = lessThanOf(fa, THREE); - GreaterThan gt = greaterThanOf(fa, ONE); - NotEquals neq = notEqualsOf(fa, FOUR); - - PropagateEquals rule = new PropagateEquals(); - Expression and = Predicates.combineAnd(asList(eq, lt, gt, neq)); - Expression exp = rule.rule((And) and); - assertEquals(eq, exp); - } - - // a = 2 AND 1 < a < 3 AND a > 0 AND a != 4 -> a = 2 - public void testPropagateEquals_VarEq2AndVarRangeGt1Lt3AndVarGt0AndVarNeq4() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - Range range = rangeOf(fa, ONE, false, THREE, false); - GreaterThan gt = greaterThanOf(fa, L(0)); - NotEquals neq = notEqualsOf(fa, FOUR); - - PropagateEquals rule = new PropagateEquals(); - Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); - Expression exp = rule.rule((And) and); - assertEquals(eq, exp); - } - - // a = 2 OR a > 1 -> a > 1 - public void testPropagateEquals_VarEq2OrVarGt1() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - GreaterThan gt = greaterThanOf(fa, ONE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, gt)); - assertEquals(gt, exp); - } - - // a = 2 OR a > 2 -> a >= 2 - public void testPropagateEquals_VarEq2OrVarGte2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - GreaterThan gt = greaterThanOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, gt)); - assertEquals(GreaterThanOrEqual.class, exp.getClass()); - GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; - assertEquals(TWO, gte.right()); - } - - // a = 2 OR a < 3 -> a < 3 - public void testPropagateEquals_VarEq2OrVarLt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - LessThan lt = lessThanOf(fa, THREE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, lt)); - assertEquals(lt, exp); - } - - // a = 3 OR a < 3 -> a <= 3 - public void testPropagateEquals_VarEq3OrVarLt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, THREE); - LessThan lt = lessThanOf(fa, THREE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, lt)); - assertEquals(LessThanOrEqual.class, exp.getClass()); - LessThanOrEqual lte = (LessThanOrEqual) exp; - assertEquals(THREE, 
lte.right()); - } - - // a = 2 OR 1 < a < 3 -> 1 < a < 3 - public void testPropagateEquals_VarEq2OrVarRangeGt1Lt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - Range range = rangeOf(fa, ONE, false, THREE, false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); - assertEquals(range, exp); - } - - // a = 2 OR 2 < a < 3 -> 2 <= a < 3 - public void testPropagateEquals_VarEq2OrVarRangeGt2Lt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - Range range = rangeOf(fa, TWO, false, THREE, false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertTrue(r.includeLower()); - assertEquals(THREE, r.upper()); - assertFalse(r.includeUpper()); - } - - // a = 3 OR 2 < a < 3 -> 2 < a <= 3 - public void testPropagateEquals_VarEq3OrVarRangeGt2Lt3() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, THREE); - Range range = rangeOf(fa, TWO, false, THREE, false); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, range)); - assertEquals(Range.class, exp.getClass()); - Range r = (Range) exp; - assertEquals(TWO, r.lower()); - assertFalse(r.includeLower()); - assertEquals(THREE, r.upper()); - assertTrue(r.includeUpper()); - } - - // a = 2 OR a != 2 -> TRUE - public void testPropagateEquals_VarEq2OrVarNeq2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - NotEquals neq = notEqualsOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, neq)); - assertEquals(TRUE, exp); - } - - // a = 2 OR a != 5 -> a != 5 - public void testPropagateEquals_VarEq2OrVarNeq5() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - NotEquals neq = notEqualsOf(fa, FIVE); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(new Or(EMPTY, eq, neq)); - assertEquals(NotEquals.class, exp.getClass()); - NotEquals ne = (NotEquals) exp; - assertEquals(FIVE, ne.right()); - } - - // a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE - public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { - FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); - Range range = rangeOf(fa, THREE, false, FOUR, false); - GreaterThan gt = greaterThanOf(fa, TWO); - NotEquals neq = notEqualsOf(fa, TWO); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); - assertEquals(TRUE, exp); - } - - // a == 1 AND a == 2 -> nop for date/time fields - public void testPropagateEquals_ignoreDateTimeFields() { - FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); - Equals eq1 = equalsOf(fa, ONE); - Equals eq2 = equalsOf(fa, TWO); - And and = new And(EMPTY, eq1, eq2); - - PropagateEquals rule = new PropagateEquals(); - Expression exp = rule.rule(and); - assertEquals(and, exp); - } - - // - // Like / Regex - // - public void testMatchAllLikeToExist() throws Exception { - for (String s : asList("%", "%%", "%%%")) { - LikePattern pattern = new LikePattern(s, (char) 0); - FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(IsNotNull.class, e.getClass()); - IsNotNull inn = 
(IsNotNull) e; - assertEquals(fa, inn.field()); - } - } - - public void testMatchAllWildcardLikeToExist() throws Exception { - for (String s : asList("*", "**", "***")) { - WildcardPattern pattern = new WildcardPattern(s); - FieldAttribute fa = getFieldAttribute(); - WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(IsNotNull.class, e.getClass()); - IsNotNull inn = (IsNotNull) e; - assertEquals(fa, inn.field()); - } - } - - public void testMatchAllRLikeToExist() throws Exception { - RLikePattern pattern = new RLikePattern(".*"); - FieldAttribute fa = getFieldAttribute(); - RLike l = new RLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(IsNotNull.class, e.getClass()); - IsNotNull inn = (IsNotNull) e; - assertEquals(fa, inn.field()); - } - - public void testExactMatchLike() throws Exception { - for (String s : asList("ab", "ab0%", "ab0_c")) { - LikePattern pattern = new LikePattern(s, '0'); - FieldAttribute fa = getFieldAttribute(); - Like l = new Like(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(s.replace("0", StringUtils.EMPTY), eq.right().fold()); - } - } - - public void testExactMatchWildcardLike() throws Exception { - String s = "ab"; - WildcardPattern pattern = new WildcardPattern(s); - FieldAttribute fa = getFieldAttribute(); - WildcardLike l = new WildcardLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(s, eq.right().fold()); - } - - public void testExactMatchRLike() throws Exception { - RLikePattern pattern = new RLikePattern("abc"); - FieldAttribute fa = getFieldAttribute(); - RLike l = new RLike(EMPTY, fa, pattern); - Expression e = new ReplaceRegexMatch().rule(l); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals("abc", eq.right().fold()); - } - - // - // CombineDisjunction in Equals - // - public void testTwoEqualsWithOr() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); - } - - public void testTwoEqualsWithSameValue() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(Equals.class, e.getClass()); - Equals eq = (Equals) e; - assertEquals(fa, eq.left()); - assertEquals(ONE, eq.right()); - } - - public void testOneEqualsOneIn() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, singletonList(TWO))); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); - } - - public void testOneEqualsOneInWithSameValue() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); - Expression e = new CombineDisjunctionsToIn().rule(or); - 
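// ONE appears in both branches; the combined In is expected to de-duplicate it, leaving exactly (1, 2).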
assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO)); - } - - public void testSingleValueInToEquals() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Equals equals = equalsOf(fa, ONE); - Or or = new Or(EMPTY, equals, new In(EMPTY, fa, singletonList(ONE))); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(equals, e); - } - - public void testEqualsBehindAnd() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Filter dummy = new Filter(EMPTY, relation(), and); - LogicalPlan transformed = new CombineDisjunctionsToIn().apply(dummy); - assertSame(dummy, transformed); - assertEquals(and, ((Filter) transformed).condition()); - } - - public void testTwoEqualsDifferentFields() throws Exception { - FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); - FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); - - Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); - Expression e = new CombineDisjunctionsToIn().rule(or); - assertEquals(or, e); - } - - public void testMultipleIn() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or firstOr = new Or(EMPTY, new In(EMPTY, fa, singletonList(ONE)), new In(EMPTY, fa, singletonList(TWO))); - Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, singletonList(THREE))); - Expression e = new CombineDisjunctionsToIn().rule(secondOr); - assertEquals(In.class, e.getClass()); - In in = (In) e; - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, TWO, THREE)); - } - - public void testOrWithNonCombinableExpressions() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or firstOr = new Or(EMPTY, new In(EMPTY, fa, singletonList(ONE)), lessThanOf(fa, TWO)); - Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, singletonList(THREE))); - Expression e = new CombineDisjunctionsToIn().rule(secondOr); - assertEquals(Or.class, e.getClass()); - Or or = (Or) e; - assertEquals(or.left(), firstOr.right()); - assertEquals(In.class, or.right().getClass()); - In in = (In) or.right(); - assertEquals(fa, in.value()); - assertThat(in.list(), contains(ONE, THREE)); - } - - // Null folding - - public void testNullFoldingIsNull() { - FoldNull foldNull = new FoldNull(); - assertEquals(true, foldNull.rule(new IsNull(EMPTY, NULL)).fold()); - assertEquals(false, foldNull.rule(new IsNull(EMPTY, TRUE)).fold()); - } - - public void testGenericNullableExpression() { - FoldNull rule = new FoldNull(); - // arithmetic - assertNullLiteral(rule.rule(new Add(EMPTY, getFieldAttribute(), NULL))); - // comparison - assertNullLiteral(rule.rule(greaterThanOf(getFieldAttribute(), NULL))); - // regex - assertNullLiteral(rule.rule(new RLike(EMPTY, NULL, new RLikePattern("123")))); - } - - public void testNullFoldingDoesNotApplyOnLogicalExpressions() { - FoldNull rule = new FoldNull(); - - Or or = new Or(EMPTY, NULL, TRUE); - assertEquals(or, rule.rule(or)); - or = new Or(EMPTY, NULL, NULL); - assertEquals(or, rule.rule(or)); - - And and = new And(EMPTY, NULL, TRUE); - assertEquals(and, rule.rule(and)); - and = new And(EMPTY, NULL, NULL); - assertEquals(and, rule.rule(and)); - } - - // - // Propagate nullability (IS NULL / IS NOT NULL) - // - - // a IS NULL AND a IS NOT NULL => false - public void testIsNullAndNotNull() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - And and = new And(EMPTY, new IsNull(EMPTY, fa), 
new IsNotNull(EMPTY, fa)); - assertEquals(FALSE, new PropagateNullable().rule(and)); - } - - // a IS NULL AND b IS NOT NULL AND c IS NULL AND d IS NOT NULL AND e IS NULL AND a IS NOT NULL => false - public void testIsNullAndNotNullMultiField() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - And andOne = new And(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, getFieldAttribute())); - And andTwo = new And(EMPTY, new IsNull(EMPTY, getFieldAttribute()), new IsNotNull(EMPTY, getFieldAttribute())); - And andThree = new And(EMPTY, new IsNull(EMPTY, getFieldAttribute()), new IsNotNull(EMPTY, fa)); - - And and = new And(EMPTY, andOne, new And(EMPTY, andThree, andTwo)); - - assertEquals(FALSE, new PropagateNullable().rule(and)); - } - - // a IS NULL AND a > 1 => a IS NULL AND false - public void testIsNullAndComparison() throws Exception { - FieldAttribute fa = getFieldAttribute(); - IsNull isNull = new IsNull(EMPTY, fa); - - And and = new And(EMPTY, isNull, greaterThanOf(fa, ONE)); - assertEquals(new And(EMPTY, isNull, nullOf(BOOLEAN)), new PropagateNullable().rule(and)); - } - - // a IS NULL AND b < 1 AND c < 1 AND a < 1 => a IS NULL AND b < 1 AND c < 1 => a IS NULL AND b < 1 AND c < 1 - public void testIsNullAndMultipleComparison() throws Exception { - FieldAttribute fa = getFieldAttribute(); - IsNull isNull = new IsNull(EMPTY, fa); - - And nestedAnd = new And( - EMPTY, - lessThanOf(TestUtils.getFieldAttribute("b"), ONE), - lessThanOf(TestUtils.getFieldAttribute("c"), ONE) - ); - And and = new And(EMPTY, isNull, nestedAnd); - And top = new And(EMPTY, and, lessThanOf(fa, ONE)); - - Expression optimized = new PropagateNullable().rule(top); - Expression expected = new And(EMPTY, and, nullOf(BOOLEAN)); - assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); - } - - // ((a+1)/2) > 1 AND a + 2 AND a IS NULL AND b < 3 => NULL AND NULL AND a IS NULL AND b < 3 - public void testIsNullAndDeeplyNestedExpression() throws Exception { - FieldAttribute fa = getFieldAttribute(); - IsNull isNull = new IsNull(EMPTY, fa); - - Expression nullified = new And( - EMPTY, - greaterThanOf(new Div(EMPTY, new Add(EMPTY, fa, ONE), TWO), ONE), - greaterThanOf(new Add(EMPTY, fa, TWO), ONE) - ); - Expression kept = new And(EMPTY, isNull, lessThanOf(TestUtils.getFieldAttribute("b"), THREE)); - And and = new And(EMPTY, nullified, kept); - - Expression optimized = new PropagateNullable().rule(and); - Expression expected = new And(EMPTY, new And(EMPTY, nullOf(BOOLEAN), nullOf(BOOLEAN)), kept); - - assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); - } - - // a IS NULL OR a IS NOT NULL => no change - // a IS NULL OR a > 1 => no change - public void testIsNullInDisjunction() throws Exception { - FieldAttribute fa = getFieldAttribute(); - - Or or = new Or(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); - Filter dummy = new Filter(EMPTY, relation(), or); - LogicalPlan transformed = new PropagateNullable().apply(dummy); - assertSame(dummy, transformed); - assertEquals(or, ((Filter) transformed).condition()); - - or = new Or(EMPTY, new IsNull(EMPTY, fa), greaterThanOf(fa, ONE)); - dummy = new Filter(EMPTY, relation(), or); - transformed = new PropagateNullable().apply(dummy); - assertSame(dummy, transformed); - assertEquals(or, ((Filter) transformed).condition()); - } - - // a + 1 AND (a IS NULL OR a > 3) => no change - public void testIsNullDisjunction() throws Exception { - FieldAttribute fa = getFieldAttribute(); - IsNull isNull = new IsNull(EMPTY, fa); - - 
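// Inside a disjunction the IS NULL branch says nothing about the sibling predicate, so PropagateNullable should leave the tree untouched.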
Or or = new Or(EMPTY, isNull, greaterThanOf(fa, THREE)); - And and = new And(EMPTY, new Add(EMPTY, fa, ONE), or); - - assertEquals(and, new PropagateNullable().rule(and)); - } - - public void testIsNotNullOnIsNullField() { - EsRelation relation = relation(); - var fieldA = TestUtils.getFieldAttribute("a"); - Expression inn = isNotNull(fieldA); - Filter f = new Filter(EMPTY, relation, inn); - - assertEquals(f, new OptimizerRules.InferIsNotNull().apply(f)); - } - - public void testIsNotNullOnOperatorWithOneField() { - EsRelation relation = relation(); - var fieldA = TestUtils.getFieldAttribute("a"); - Expression inn = isNotNull(new Add(EMPTY, fieldA, ONE)); - Filter f = new Filter(EMPTY, relation, inn); - Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); - - assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); - } - - public void testIsNotNullOnOperatorWithTwoFields() { - EsRelation relation = relation(); - var fieldA = TestUtils.getFieldAttribute("a"); - var fieldB = TestUtils.getFieldAttribute("b"); - Expression inn = isNotNull(new Add(EMPTY, fieldA, fieldB)); - Filter f = new Filter(EMPTY, relation, inn); - Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); - - assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); - } - - public void testIsNotNullOnFunctionWithTwoField() {} - - private IsNotNull isNotNull(Expression field) { - return new IsNotNull(EMPTY, field); - } - - private IsNull isNull(Expression field) { - return new IsNull(EMPTY, field); - } - - private Literal nullOf(DataType dataType) { - return new Literal(Source.EMPTY, null, dataType); - } - - private void assertNullLiteral(Expression expression) { - assertEquals(Literal.class, expression.getClass()); - assertNull(expression.fold()); - } } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java index 47fa87bf188af..47c471af1051c 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/MatchQueryTests.java @@ -23,7 +23,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.hamcrest.Matchers.equalTo; public class MatchQueryTests extends ESTestCase { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/DataTypeConversionTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/DataTypeConversionTests.java index 80754857d7f24..dee41e089de13 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/DataTypeConversionTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/DataTypeConversionTests.java @@ -17,23 +17,23 @@ import java.math.BigInteger; import java.time.ZonedDateTime; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.BYTE; +import 
static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.commonType; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.converterFor; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BYTE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SHORT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; import static org.elasticsearch.xpack.esql.core.type.DateUtils.asDateTime; public class DataTypeConversionTests extends ESTestCase { @@ -539,8 +539,8 @@ public void testCommonType() { } public void testEsDataTypes() { - for (DataType type : DataTypes.types()) { - assertEquals(type, DataTypes.fromTypeName(type.typeName())); + for (DataType type : DataType.types()) { + assertEquals(type, DataType.fromTypeName(type.typeName())); } } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/EsFieldTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/EsFieldTests.java index 75921778d5970..e72ae0c5c0cda 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/EsFieldTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/EsFieldTests.java @@ -12,7 +12,7 @@ public class EsFieldTests extends AbstractEsFieldTypeTests { static EsField randomEsField(int maxPropertiesDepth) { String name = randomAlphaOfLength(4); - DataType esDataType = randomFrom(DataTypes.types()); + DataType esDataType = randomFrom(DataType.types()); Map properties = randomProperties(maxPropertiesDepth); boolean aggregatable = randomBoolean(); boolean isAlias = randomBoolean(); @@ -33,7 +33,7 @@ protected EsField mutate(EsField instance) { boolean isAlias = 
instance.isAlias(); switch (between(0, 4)) { case 0 -> name = randomAlphaOfLength(name.length() + 1); - case 1 -> esDataType = randomValueOtherThan(esDataType, () -> randomFrom(DataTypes.types())); + case 1 -> esDataType = randomValueOtherThan(esDataType, () -> randomFrom(DataType.types())); case 2 -> properties = randomValueOtherThan(properties, () -> randomProperties(4)); case 3 -> aggregatable = false == aggregatable; case 4 -> isAlias = false == isAlias; diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/TypesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/TypesTests.java index b3be18156ab8b..489666976b592 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/TypesTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/type/TypesTests.java @@ -15,12 +15,12 @@ import java.util.Map; import static java.util.Collections.emptyMap; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NESTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.OBJECT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.NESTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -128,7 +128,7 @@ public void testDottedField() { assertThat(mapping.size(), is(2)); EsField field = mapping.get("manager"); - assertThat(DataTypes.isPrimitive(field.getDataType()), is(false)); + assertThat(DataType.isPrimitive(field.getDataType()), is(false)); assertThat(field.getDataType(), is(OBJECT)); Map children = field.getProperties(); assertThat(children.size(), is(2)); @@ -143,7 +143,7 @@ public void testMultiField() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("text"); - assertThat(DataTypes.isPrimitive(field.getDataType()), is(true)); + assertThat(DataType.isPrimitive(field.getDataType()), is(true)); assertThat(field.getDataType(), is(TEXT)); Map fields = field.getProperties(); assertThat(fields.size(), is(4)); @@ -157,7 +157,7 @@ public void testMultiFieldTooManyOptions() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("text"); - assertThat(DataTypes.isPrimitive(field.getDataType()), is(true)); + assertThat(DataType.isPrimitive(field.getDataType()), is(true)); assertThat(field, instanceOf(TextEsField.class)); Map fields = field.getProperties(); assertThat(fields.size(), is(4)); @@ -171,7 +171,7 @@ public void testNestedDoc() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("dep"); - assertThat(DataTypes.isPrimitive(field.getDataType()), is(false)); + assertThat(DataType.isPrimitive(field.getDataType()), is(false)); assertThat(field.getDataType(), is(NESTED)); Map children = field.getProperties(); assertThat(children.size(), is(4)); diff --git 
a/x-pack/plugin/esql-core/src/test/resources/mapping-multi-field-variation.json b/x-pack/plugin/esql-core/src/test/resources/mapping-multi-field-variation.json index b5b3d42816502..5369e50dd6bb9 100644 --- a/x-pack/plugin/esql-core/src/test/resources/mapping-multi-field-variation.json +++ b/x-pack/plugin/esql-core/src/test/resources/mapping-multi-field-variation.json @@ -8,6 +8,8 @@ "keyword" : { "type" : "keyword" }, "date" : { "type" : "date" }, "date_nanos": { "type" : "date_nanos" }, + "long" : { "type" : "long" }, + "ip" : { "type" : "ip" }, "unsupported" : { "type" : "ip_range" }, "some" : { "properties" : { diff --git a/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java b/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java index 27da0f56deb20..35d73f87f2ceb 100644 --- a/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java +++ b/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.hamcrest.Description; @@ -82,7 +81,7 @@ import static org.elasticsearch.test.ESTestCase.randomZone; import static org.elasticsearch.xpack.esql.core.TestUtils.StringContainsRegex.containsRegex; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; @@ -115,7 +114,7 @@ public static Literal of(Source source, Object value) { if (value instanceof Literal) { return (Literal) value; } - return new Literal(source, value, DataTypes.fromJava(value)); + return new Literal(source, value, DataType.fromJava(value)); } public static Equals equalsOf(Expression left, Expression right) { @@ -159,7 +158,7 @@ public static RLike rlike(Expression left, String exp) { } public static FieldAttribute fieldAttribute() { - return fieldAttribute(randomAlphaOfLength(10), randomFrom(DataTypes.types())); + return fieldAttribute(randomAlphaOfLength(10), randomFrom(DataType.types())); } public static FieldAttribute fieldAttribute(String name, DataType type) { diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 669f38bd44ecc..92071543aa27e 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -4,6 +4,8 @@ import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.string-templates' +apply plugin: 'elasticsearch.publish' + esplugin { name 'x-pack-esql' description 'The plugin that powers ESQL for Elasticsearch' @@ -47,24 +49,34 @@ tasks.named("compileJava").configure { exclude { it.file.toString().startsWith("${projectDir}/src/main/generated-src/generated") } } +interface Injected { + @Inject FileSystemOperations getFs() +} + tasks.named("test").configure { if (BuildParams.isCi() == false) { systemProperty 'generateDocs', true 
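// Switching from project.delete/project.sync to an injected FileSystemOperations keeps Project references out of the task's execution-time closures, presumably for configuration-cache compatibility.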
+ def injected = project.objects.newInstance(Injected) doFirst { - project.delete( - files("${projectDir}/build/testrun/test/temp/esql/functions") - ) + injected.fs.delete { + it.delete("build/testrun/test/temp/esql/functions") + } } + File functionsFolder = file("build/testrun/test/temp/esql/functions") + File signatureFolder = file("build/testrun/test/temp/esql/functions/signature") + File typesFolder = file("build/testrun/test/temp/esql/functions/types") + def functionsDocFolder = file("${rootDir}/docs/reference/esql/functions") + def effectiveProjectDir = projectDir + doLast { - List signatures = file("${projectDir}/build/testrun/test/temp/esql/functions/signature").list().findAll {it.endsWith("svg")} - List types = file("${projectDir}/build/testrun/test/temp/esql/functions/types").list().findAll {it.endsWith("asciidoc")} + List types = typesFolder.list().findAll {it.endsWith("asciidoc")} int count = types == null ? 0 : types.size() Closure readExample = line -> { line.replaceAll(/read-example::([^\[]+)\[tag=([^,\]]+)(, ?json)?\]/, { String file = it[1] String tag = it[2] boolean isJson = it[3] - String allExamples = new File("${projectDir}/qa/testFixtures/src/main/resources/${file}").text + String allExamples = new File("${effectiveProjectDir}/qa/testFixtures/src/main/resources/${file}").text .replaceAll(System.lineSeparator(), "\n") int start = allExamples.indexOf("tag::${tag}[]") int end = allExamples.indexOf("end::${tag}[]", start) @@ -85,9 +97,9 @@ tasks.named("test").configure { logger.quiet("ESQL Docs: No function signatures created. Skipping sync.") } else if (count == 1) { logger.quiet("ESQL Docs: Only files related to $types, patching them into place") - project.sync { - from "${projectDir}/build/testrun/test/temp/esql/functions" - into "${rootDir}/docs/reference/esql/functions" + injected.fs.sync { + from functionsFolder + into functionsDocFolder include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { include '/*.asciidoc', '**/*.asciidoc', '**/*.md', '**/*.json', '**/*.svg', 'README.md' @@ -95,9 +107,9 @@ tasks.named("test").configure { filter readExample } } else { - project.sync { - from "${projectDir}/build/testrun/test/temp/esql/functions" - into "${rootDir}/docs/reference/esql/functions" + injected.fs.sync { + from functionsFolder + into functionsDocFolder include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { include '/*.asciidoc', 'README.md' diff --git a/x-pack/plugin/esql/compute/ann/build.gradle b/x-pack/plugin/esql/compute/ann/build.gradle index ee8d8c62dff39..072f1f6628b07 100644 --- a/x-pack/plugin/esql/compute/ann/build.gradle +++ b/x-pack/plugin/esql/compute/ann/build.gradle @@ -1,4 +1,9 @@ apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.publish' + +base { + archivesName = 'x-pack-esql-compute-ann' +} tasks.named('forbiddenApisMain').configure { // doesn't depend on anything diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index fce051ed60038..6092b006a551c 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -1,5 +1,10 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.string-templates' +apply plugin: 'elasticsearch.publish' + +base { + archivesName = 'x-pack-esql-compute' +} dependencies { compileOnly project(':server') diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java 
b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 55a81cd7aaace..e1456328e7f64 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -52,6 +52,8 @@ import static org.elasticsearch.compute.gen.Types.WARNINGS; import static org.elasticsearch.compute.gen.Types.blockType; import static org.elasticsearch.compute.gen.Types.builderType; +import static org.elasticsearch.compute.gen.Types.elementType; +import static org.elasticsearch.compute.gen.Types.vectorFixedBuilderType; import static org.elasticsearch.compute.gen.Types.vectorType; public class EvaluatorImplementer { @@ -120,11 +122,11 @@ private TypeSpec type() { private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); builder.addParameter(SOURCE, "source"); - builder.addStatement("this.warnings = new Warnings(source)"); processFunction.args.stream().forEach(a -> a.implementCtor(builder)); builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addStatement("this.driverContext = driverContext"); + builder.addStatement("this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source)"); return builder.build(); } @@ -167,19 +169,25 @@ private MethodSpec realEval(boolean blockStyle) { builder.addModifiers(Modifier.PUBLIC).returns(resultDataType); builder.addParameter(TypeName.INT, "positionCount"); - processFunction.args.stream().forEach(a -> { - if (a.paramName(blockStyle) != null) { - builder.addParameter(a.dataType(blockStyle), a.paramName(blockStyle)); - } - }); + boolean vectorize = false; + if (blockStyle == false && processFunction.warnExceptions.isEmpty() && processOutputsMultivalued == false) { + ClassName type = processFunction.resultDataType(false); + vectorize = type.simpleName().startsWith("BytesRef") == false; + } - TypeName builderType = builderType(resultDataType); + TypeName builderType = vectorize ? vectorFixedBuilderType(elementType(resultDataType)) : builderType(resultDataType); builder.beginControlFlow( "try($T result = driverContext.blockFactory().$L(positionCount))", builderType, buildFromFactory(builderType) ); { + processFunction.args.stream().forEach(a -> { + if (a.paramName(blockStyle) != null) { + builder.addParameter(a.dataType(blockStyle), a.paramName(blockStyle)); + } + }); + processFunction.args.stream().forEach(a -> a.createScratch(builder)); builder.beginControlFlow("position: for (int p = 0; p < positionCount; p++)"); @@ -226,7 +234,7 @@ private MethodSpec realEval(boolean blockStyle) { pattern.append(")"); String builtPattern; if (processFunction.builderArg == null) { - builtPattern = "result.$L(" + pattern + ")"; + builtPattern = vectorize ? 
"result.$L(p, " + pattern + ")" : "result.$L(" + pattern + ")"; args.add(0, appendMethod(resultDataType)); } else { builtPattern = pattern.toString(); @@ -249,8 +257,9 @@ private MethodSpec realEval(boolean blockStyle) { } builder.endControlFlow(); builder.addStatement("return result.build()"); - builder.endControlFlow(); } + builder.endControlFlow(); + return builder.build(); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 52b1c2b09b629..993b8363fb35f 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -166,10 +166,10 @@ private MethodSpec ctor() { } builder.addParameter(EXPRESSION_EVALUATOR, "field"); builder.addStatement("super(driverContext, field)"); + builder.addParameter(DRIVER_CONTEXT, "driverContext"); if (warnExceptions.isEmpty() == false) { - builder.addStatement("this.warnings = new Warnings(source)"); + builder.addStatement("this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source)"); } - builder.addParameter(DRIVER_CONTEXT, "driverContext"); return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index 5b82950c7de37..953b7172a2862 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; @@ -59,9 +58,9 @@ void set(int groupId, double value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (DoubleVector.Builder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendDouble(values.get(selected.getInt(i))); + builder.appendDouble(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -107,7 +106,7 @@ public void toIntermediate( } else { valuesBuilder.appendDouble(0); // TODO can we just use null? 
} - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 0234f36f6675c..034ed72d08c17 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -58,9 +58,9 @@ void set(int groupId, int value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (IntVector.Builder builder = driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendInt(values.get(selected.getInt(i))); + builder.appendInt(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -106,7 +106,7 @@ public void toIntermediate( } else { valuesBuilder.appendInt(0); // TODO can we just use null? } - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index 860bf43eaad82..9ff7e3f536484 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -12,7 +12,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; @@ -65,9 +64,9 @@ void increment(int groupId, long value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (LongVector.Builder builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.appendLong(values.get(selected.getInt(i))); + builder.appendLong(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -113,7 +112,7 @@ public void toIntermediate( } else { valuesBuilder.appendLong(0); // TODO can we just use null? 
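// The array-state rewrites above use the positional append this diff adds to the fixed
// vector builders: the caller passes the write index explicitly instead of the builder
// tracking a cursor, which is why the declarations switch from Vector.Builder to
// var/FixedBuilder (the base Builder interface does not expose the indexed overload).
// A minimal usage sketch against the BlockFactory API shown in this diff:
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.LongVector;

class IndexedAppendSketch {
    static LongVector copyOf(BlockFactory blockFactory, long[] values) {
        try (LongVector.FixedBuilder builder = blockFactory.newLongVectorFixedBuilder(values.length)) {
            for (int i = 0; i < values.length; i++) {
                builder.appendLong(i, values[i]); // indexed write; slots may be filled in any order
            }
            return builder.build();
        }
    }
}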
} - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index 2dc5b441ca00d..cbd20f15c6511 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -272,7 +272,7 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); DoubleBlock.Builder values = blockFactory.newDoubleBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -290,11 +290,11 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index 1ba8b9264c24a..01c3e3d7fb8e7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -273,7 +273,7 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); IntBlock.Builder values = blockFactory.newIntBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -291,11 +291,11 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index 846c6f0cc2730..c84985b703aed 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -272,7 +272,7 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); LongBlock.Builder values = blockFactory.newLongBlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -290,11 +290,11 @@ public void toIntermediate(Block[] blocks, int offset, IntVector selected, Drive } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index c8921a7c9f02e..5cf900cfc4a71 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -116,7 +116,7 @@ default void writeTo(StreamOutput out) throws IOException { private static BooleanVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newBooleanVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendBoolean(in.readBoolean()); + builder.appendBoolean(i, in.readBoolean()); } return builder.build(); } @@ -150,5 +150,8 @@ sealed interface FixedBuilder extends Builder permits BooleanVectorFixedBuilder */ @Override FixedBuilder appendBoolean(boolean value); + + FixedBuilder appendBoolean(int index, boolean value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 4cc2ec17b6ad4..8b952ee0d951a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + BooleanVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ public BooleanVectorFixedBuilder appendBoolean(boolean value) { return this; } + @Override + public BooleanVectorFixedBuilder appendBoolean(int idx, boolean value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantBooleanVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ public long estimatedBytes() { @Override public BooleanVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; BooleanVector vector; if (values.length == 1) { vector = blockFactory.newConstantBooleanBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ public BooleanVector build() { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 3d93043f93d8f..10d4f4abe5f6a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -117,7 +117,7 @@ default void writeTo(StreamOutput out) throws IOException { private static DoubleVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newDoubleVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendDouble(in.readDouble()); + builder.appendDouble(i, in.readDouble()); } return builder.build(); } @@ -151,5 +151,8 @@ sealed interface FixedBuilder extends Builder permits DoubleVectorFixedBuilder { */ @Override FixedBuilder appendDouble(double value); + + FixedBuilder appendDouble(int index, double value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index 42cdd0f5667ff..ff363b36e44b1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + DoubleVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ public DoubleVectorFixedBuilder appendDouble(double value) { return this; } + @Override + public DoubleVectorFixedBuilder appendDouble(int idx, double value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantDoubleVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ public long estimatedBytes() { @Override public DoubleVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; DoubleVector vector; if (values.length == 1) { vector = blockFactory.newConstantDoubleBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ public DoubleVector build() { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index b1a2d1b80a410..384d5813d5750 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -126,7 +126,7 @@ default void writeTo(StreamOutput out) throws IOException { private static IntVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(in.readInt()); + builder.appendInt(i, in.readInt()); } return builder.build(); } @@ -169,5 +169,8 @@ sealed interface FixedBuilder extends Builder permits IntVectorFixedBuilder { */ @Override FixedBuilder appendInt(int value); + + FixedBuilder appendInt(int index, int value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 77e3511a5cb54..9ab01d019252a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class IntVectorFixedBuilder implements IntVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + IntVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ public IntVectorFixedBuilder appendInt(int value) { return this; } + @Override + public IntVectorFixedBuilder appendInt(int idx, int value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantIntVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ public long estimatedBytes() { @Override public IntVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; IntVector vector; if (values.length == 1) { vector = blockFactory.newConstantIntBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ public IntVector build() { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index e2f53d1ee07f4..a74146b692e31 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -117,7 +117,7 @@ default void writeTo(StreamOutput out) throws IOException { private static LongVector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.newLongVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendLong(in.readLong()); + builder.appendLong(i, in.readLong()); } return builder.build(); } @@ -151,5 +151,8 @@ sealed interface FixedBuilder extends Builder permits LongVectorFixedBuilder { */ @Override FixedBuilder appendLong(long value); + + FixedBuilder appendLong(int index, long value); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 2ad259198bf1b..77dd0a87dfb2f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -25,6 +25,8 @@ final class LongVectorFixedBuilder implements LongVector.FixedBuilder { */ private int nextIndex; + private boolean closed; + LongVectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ public LongVectorFixedBuilder appendLong(long value) { return this; } + @Override + public LongVectorFixedBuilder appendLong(int idx, long value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
ConstantLongVector.RAM_BYTES_USED @@ -53,13 +61,10 @@ public long estimatedBytes() { @Override public LongVector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; LongVector vector; if (values.length == 1) { vector = blockFactory.newConstantLongBlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ public LongVector build() { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index e81af4841d1a4..246aebe2c08ec 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -14,7 +14,9 @@ $if(long)$ import org.elasticsearch.compute.data.IntVector; $endif$ import org.elasticsearch.compute.data.$Type$Block; +$if(int)$ import org.elasticsearch.compute.data.$Type$Vector; +$endif$ $if(double)$ import org.elasticsearch.compute.data.IntVector; $endif$ @@ -72,9 +74,9 @@ $endif$ Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try ($Type$Vector.Builder builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { + try (var builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { - builder.append$Type$(values.get(selected.getInt(i))); + builder.append$Type$(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } @@ -120,7 +122,7 @@ $endif$ } else { valuesBuilder.append$Type$(0); // TODO can we just use null? 
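// In the four generated fixed builders above, the nextIndex sentinel gives way to a
// dedicated closed flag: build() flips it, a second build() throws, and close() returns
// the pre-adjusted breaker bytes only if the vector was never built. Dropping the
// "expected to write [N] entries" check also makes a partially filled builder legal,
// with unwritten slots keeping the array default. A lifecycle sketch, assuming the
// generated IntVectorFixedBuilder semantics from this diff:
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntVector;

class FixedBuilderLifecycleSketch {
    static IntVector buildOnce(BlockFactory blockFactory) {
        try (IntVector.FixedBuilder builder = blockFactory.newIntVectorFixedBuilder(2)) {
            builder.appendInt(1, 42); // positions may be written out of order
            builder.appendInt(0, 7);
            IntVector vector = builder.build(); // marks the builder closed
            // a second builder.build() would throw IllegalStateException("already closed");
            // the implicit builder.close() is then a breaker no-op.
            return vector;
        }
    }
}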
} - hasValueBuilder.appendBoolean(hasValue(group)); + hasValueBuilder.appendBoolean(i, hasValue(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index ad305809c6651..212a017cb300d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -275,7 +275,7 @@ public class Rate$Type$Aggregator { try ( LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); $Type$Block.Builder values = blockFactory.new$Type$BlockBuilder(positionCount * 2); - DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + DoubleVector.FixedBuilder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) ) { for (int i = 0; i < positionCount; i++) { final var groupId = selected.getInt(i); @@ -293,11 +293,11 @@ public class Rate$Type$Aggregator { } values.endPositionEntry(); - resets.appendDouble(state.reset); + resets.appendDouble(i, state.reset); } else { timestamps.appendNull(); values.appendNull(); - resets.appendDouble(0); + resets.appendDouble(i, 0); } } blocks[offset] = timestamps.build(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 17aa5afbe3ade..4c2817588904a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -64,7 +64,7 @@ private IntVector add(BooleanVector vector) { int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i))); + builder.appendInt(i, MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i))); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java index ce11d1bb64146..626c5bb910ce3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRef3BlockHash.java @@ -84,12 +84,12 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private void addVectors(BytesRefVector v1, BytesRefVector v2, BytesRefVector v3, GroupingAggregatorFunction.AddInput addInput) { final int positionCount = v1.getPositionCount(); - try (IntVector.Builder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { + try (IntVector.FixedBuilder ordsBuilder = blockFactory.newIntVectorFixedBuilder(positionCount)) { // TODO: enable ordinal vectors in BytesRefBlockHash try (IntVector k1 = hash1.add(v1); IntVector k2 = hash2.add(v2); IntVector k3 
= hash3.add(v3)) { for (int p = 0; p < positionCount; p++) { long ord = hashOrdToGroup(finalHash.add(k1.getInt(p), k2.getInt(p), k3.getInt(p))); - ordsBuilder.appendInt(Math.toIntExact(ord)); + ordsBuilder.appendInt(p, Math.toIntExact(ord)); } } try (IntVector ords = ordsBuilder.build()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index 11423539db396..3be4db702a931 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -67,7 +67,7 @@ IntVector add(LongVector vector1, LongVector vector2) { int positions = vector1.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); + builder.appendInt(i, Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java index 09b1022200b6a..7cbc7cc4c25db 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java @@ -96,13 +96,13 @@ public Block[] getKeys() { LongVector timestampIntervals = null; try ( BytesRefVector.Builder tsidHashesBuilder = blockFactory.newBytesRefVectorBuilder(positions); - LongVector.Builder timestampIntervalsBuilder = blockFactory.newLongVectorFixedBuilder(positions) + LongVector.FixedBuilder timestampIntervalsBuilder = blockFactory.newLongVectorFixedBuilder(positions) ) { BytesRef scratch = new BytesRef(); for (long i = 0; i < positions; i++) { BytesRef key1 = this.tsidHashes.get(intervalHash.getKey1(i), scratch); tsidHashesBuilder.appendBytesRef(key1); - timestampIntervalsBuilder.appendLong(intervalHash.getKey2(i)); + timestampIntervalsBuilder.appendLong((int) i, intervalHash.getKey2(i)); } tsidHashes = tsidHashesBuilder.build(); timestampIntervals = timestampIntervalsBuilder.build(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java index bcb245146c2c6..b8a02642450ca 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/AscendingSequenceRowInTableLookup.java @@ -51,9 +51,9 @@ private IntBlock lookupVector(IntVector vector) { } private IntVector lookupVectorInRange(IntVector vector) { - try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(vector.getPositionCount())) { + try (IntVector.FixedBuilder builder = blockFactory.newIntVectorFixedBuilder(vector.getPositionCount())) { for (int i = 0; i < 
vector.getPositionCount(); i++) { - builder.appendInt(vector.getInt(i) - min); + builder.appendInt(i, vector.getInt(i) - min); } return builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 628ee93ed757d..0113f4940adb5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -185,7 +185,7 @@ $endif$ private static $Type$Vector readValues(int positions, StreamInput in, BlockFactory blockFactory) throws IOException { try (var builder = blockFactory.new$Type$Vector$if(BytesRef)$$else$Fixed$endif$Builder(positions)) { for (int i = 0; i < positions; i++) { - builder.append$Type$(in.read$Type$()); + builder.append$Type$($if(BytesRef)$$else$i, $endif$in.read$Type$()); } return builder.build(); } @@ -243,6 +243,9 @@ $else$ */ @Override FixedBuilder append$Type$($type$ value); + + FixedBuilder append$Type$(int index, $type$ value); + } $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index af783a2435251..a8876c5120090 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -25,6 +25,8 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { */ private int nextIndex; + private boolean closed; + $Type$VectorFixedBuilder(int size, BlockFactory blockFactory) { preAdjustedBytes = ramBytesUsed(size); blockFactory.adjustBreaker(preAdjustedBytes); @@ -38,6 +40,12 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { return this; } + @Override + public $Type$VectorFixedBuilder append$Type$(int idx, $type$ value) { + values[idx] = value; + return this; + } + private static long ramBytesUsed(int size) { return size == 1 ? 
Constant$Type$Vector.RAM_BYTES_USED @@ -53,13 +61,10 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { @Override public $Type$Vector build() { - if (nextIndex < 0) { + if (closed) { throw new IllegalStateException("already closed"); } - if (nextIndex != values.length) { - throw new IllegalStateException("expected to write [" + values.length + "] entries but wrote [" + nextIndex + "]"); - } - nextIndex = -1; + closed = true; $Type$Vector vector; if (values.length == 1) { vector = blockFactory.newConstant$Type$BlockWith(values[0], 1, preAdjustedBytes).asVector(); @@ -72,14 +77,14 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { @Override public void close() { - if (nextIndex >= 0) { + if (closed == false) { // If nextIndex < 0 we've already built the vector - nextIndex = -1; + closed = true; blockFactory.adjustBreaker(-preAdjustedBytes); } } boolean isReleased() { - return nextIndex < 0; + return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index d645a7cbe0185..843aa4aaaa881 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; @@ -57,11 +58,26 @@ public class DriverContext { private final AsyncActions asyncActions = new AsyncActions(); + private final WarningsMode warningsMode; + public DriverContext(BigArrays bigArrays, BlockFactory blockFactory) { + this(bigArrays, blockFactory, WarningsMode.COLLECT); + } + + private DriverContext(BigArrays bigArrays, BlockFactory blockFactory, WarningsMode warningsMode) { Objects.requireNonNull(bigArrays); Objects.requireNonNull(blockFactory); this.bigArrays = bigArrays; this.blockFactory = blockFactory; + this.warningsMode = warningsMode; + } + + public static DriverContext getLocalDriver() { + return new DriverContext( + BigArrays.NON_RECYCLING_INSTANCE, + // TODO maybe this should have a small fixed limit? + new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) + ); } public BigArrays bigArrays() { @@ -159,6 +175,22 @@ public void removeAsyncAction() { asyncActions.removeInstance(); } + /** + * Evaluators should use this function to decide their warning behavior. 
+ * @return an appropriate {@link WarningsMode} + */ + public WarningsMode warningsMode() { + return warningsMode; + } + + /** + * Indicates the behavior Evaluators of this context should use for reporting warnings + */ + public enum WarningsMode { + COLLECT, + IGNORE + } + private static class AsyncActions { private final SubscribableListener completion = new SubscribableListener<>(); private final AtomicBoolean finished = new AtomicBoolean(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 2aea504db1ac8..b5ae35bfc8d7f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -611,12 +611,12 @@ private static class SortedDocValuesBlockOrdinalsReader extends BlockOrdinalsRea @Override IntBlock readOrdinalsAdded1(IntVector docs) throws IOException { final int positionCount = docs.getPositionCount(); - try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(positionCount)) { + try (IntVector.FixedBuilder builder = blockFactory.newIntVectorFixedBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { if (sortedDocValues.advanceExact(docs.getInt(p))) { - builder.appendInt(sortedDocValues.ordValue() + 1); + builder.appendInt(p, sortedDocValues.ordValue() + 1); } else { - builder.appendInt(0); + builder.appendInt(p, 0); } } return builder.build().asBlock(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index b92c6d01e5077..9665590940afe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -37,9 +37,9 @@ public SequenceBooleanBlockSourceOperator(BlockFactory blockFactory, List answer = runEsql( builder.query(testCase.query), testCase.expectedWarnings(false), testCase.expectedWarningsRegex() ); + var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); @@ -306,4 +316,50 @@ public static void assertRequestBreakerEmpty() throws Exception { } }); } + + /** + * "tables" parameter sent if there is a LOOKUP in the request. 
If you + * add to this, you must also add to {@link EsqlTestUtils#tables}; + */ + private Map>> tables() { + Map>> tables = new TreeMap<>(); + tables.put( + "int_number_names", + EsqlTestUtils.table( + Map.entry("int:integer", IntStream.range(0, 10).boxed().toList()), + Map.entry("name:keyword", IntStream.range(0, 10).mapToObj(EsqlTestUtils::numberName).toList()) + ) + ); + tables.put( + "long_number_names", + EsqlTestUtils.table( + Map.entry("long:long", LongStream.range(0, 10).boxed().toList()), + Map.entry("name:keyword", IntStream.range(0, 10).mapToObj(EsqlTestUtils::numberName).toList()) + ) + ); + tables.put( + "double_number_names", + EsqlTestUtils.table( + Map.entry("double:double", List.of(2.03, 2.08)), + Map.entry("name:keyword", List.of("two point zero three", "two point zero eight")) + ) + ); + tables.put( + "double_number_names_with_null", + EsqlTestUtils.table( + Map.entry("double:double", List.of(2.03, 2.08, 0.0)), + Map.entry("name:keyword", Arrays.asList("two point zero three", "two point zero eight", null)) + ) + ); + tables.put( + "big", + EsqlTestUtils.table( + Map.entry("aa:keyword", List.of("foo", "bar", "baz", "foo")), + Map.entry("ab:keyword", List.of("zoo", "zop", "zoi", "foo")), + Map.entry("na:integer", List.of(1, 10, 100, 2)), + Map.entry("nb:integer", List.of(-1, -10, -100, -2)) + ) + ); + return tables; + } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 3f21c9da31861..349f9445030c4 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -113,6 +113,8 @@ public static class RequestObjectBuilder { private final XContentBuilder builder; private boolean isBuilt = false; + private Map>> tables; + private Boolean keepOnCompletion = null; public RequestObjectBuilder() throws IOException { @@ -129,6 +131,11 @@ public RequestObjectBuilder query(String query) throws IOException { return this; } + public RequestObjectBuilder tables(Map>> tables) { + this.tables = tables; + return this; + } + public RequestObjectBuilder columnar(boolean columnar) throws IOException { builder.field("columnar", columnar); return this; @@ -173,6 +180,9 @@ public RequestObjectBuilder pragmas(Settings pragmas) throws IOException { public RequestObjectBuilder build() throws IOException { if (isBuilt == false) { + if (tables != null) { + builder.field("tables", tables); + } builder.endObject(); isBuilt = true; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 820d62eb50e37..7096c12022729 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -25,7 +26,6 
@@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.DateUtils; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.TypesTests; @@ -45,12 +45,13 @@ import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.TreeMap; import java.util.regex.Pattern; import static java.util.Collections.emptyList; @@ -269,9 +270,13 @@ public static void assertWarnings(List warnings, List allowedWar } } - static Map> tables() { + /** + * "tables" provided in the context for the LOOKUP command. If you + * add to this, you must also add to {@code EsqlSpecTestCase#tables}; + */ + public static Map> tables() { BlockFactory factory = new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE); - Map> tables = new HashMap<>(); + Map> tables = new TreeMap<>(); try ( IntBlock.Builder ints = factory.newIntBlockBuilder(10); LongBlock.Builder longs = factory.newLongBlockBuilder(10); @@ -280,19 +285,7 @@ static Map> tables() { for (int i = 0; i < 10; i++) { ints.appendInt(i); longs.appendLong(i); - names.appendBytesRef(new BytesRef(switch (i) { - case 0 -> "zero"; - case 1 -> "one"; - case 2 -> "two"; - case 3 -> "three"; - case 4 -> "four"; - case 5 -> "five"; - case 6 -> "six"; - case 7 -> "seven"; - case 8 -> "eight"; - case 9 -> "nine"; - default -> throw new IllegalArgumentException(); - })); + names.appendBytesRef(new BytesRef(numberName(i))); } IntBlock intsBlock = ints.build(); @@ -300,13 +293,104 @@ static Map> tables() { BytesRefBlock namesBlock = names.build(); tables.put( "int_number_names", - Map.of("int", new Column(DataTypes.INTEGER, intsBlock), "name", new Column(DataTypes.KEYWORD, namesBlock)) + table( + Map.entry("int", new Column(DataType.INTEGER, intsBlock)), + Map.entry("name", new Column(DataType.KEYWORD, namesBlock)) + ) ); tables.put( "long_number_names", - Map.of("long", new Column(DataTypes.LONG, longsBlock), "name", new Column(DataTypes.KEYWORD, namesBlock)) + table(Map.entry("long", new Column(DataType.LONG, longsBlock)), Map.entry("name", new Column(DataType.KEYWORD, namesBlock))) + ); + } + for (boolean hasNull : new boolean[] { true, false }) { + try ( + DoubleBlock.Builder doubles = factory.newDoubleBlockBuilder(2); + BytesRefBlock.Builder names = factory.newBytesRefBlockBuilder(2); + ) { + doubles.appendDouble(2.03); + names.appendBytesRef(new BytesRef("two point zero three")); + doubles.appendDouble(2.08); + names.appendBytesRef(new BytesRef("two point zero eight")); + if (hasNull) { + doubles.appendDouble(0.0); + names.appendNull(); + } + tables.put( + "double_number_names" + (hasNull ? 
"_with_null" : ""), + table( + Map.entry("double", new Column(DataType.DOUBLE, doubles.build())), + Map.entry("name", new Column(DataType.KEYWORD, names.build())) + ) + ); + } + } + try ( + BytesRefBlock.Builder aa = factory.newBytesRefBlockBuilder(3); + BytesRefBlock.Builder ab = factory.newBytesRefBlockBuilder(3); + IntBlock.Builder na = factory.newIntBlockBuilder(3); + IntBlock.Builder nb = factory.newIntBlockBuilder(3); + ) { + aa.appendBytesRef(new BytesRef("foo")); + ab.appendBytesRef(new BytesRef("zoo")); + na.appendInt(1); + nb.appendInt(-1); + + aa.appendBytesRef(new BytesRef("bar")); + ab.appendBytesRef(new BytesRef("zop")); + na.appendInt(10); + nb.appendInt(-10); + + aa.appendBytesRef(new BytesRef("baz")); + ab.appendBytesRef(new BytesRef("zoi")); + na.appendInt(100); + nb.appendInt(-100); + + aa.appendBytesRef(new BytesRef("foo")); + ab.appendBytesRef(new BytesRef("foo")); + na.appendInt(2); + nb.appendInt(-2); + + tables.put( + "big", + table( + Map.entry("aa", new Column(DataType.KEYWORD, aa.build())), + Map.entry("ab", new Column(DataType.KEYWORD, ab.build())), + Map.entry("na", new Column(DataType.INTEGER, na.build())), + Map.entry("nb", new Column(DataType.INTEGER, nb.build())) + ) ); } + return unmodifiableMap(tables); } + + /** + * Builds a table from the provided parameters. This isn't just a call to + * {@link Map#of} because we want to maintain sort order of the columns + */ + @SafeVarargs + public static Map table(Map.Entry... kv) { + Map table = new LinkedHashMap<>(); + for (Map.Entry stringTEntry : kv) { + table.put(stringTEntry.getKey(), stringTEntry.getValue()); + } + return table; + } + + public static String numberName(int i) { + return switch (i) { + case 0 -> "zero"; + case 1 -> "one"; + case 2 -> "two"; + case 3 -> "three"; + case 4 -> "four"; + case 5 -> "five"; + case 6 -> "six"; + case 7 -> "seven"; + case 8 -> "eight"; + case 9 -> "nine"; + default -> throw new IllegalArgumentException(); + }; + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 0c808afc9d12b..776cc2f95f465 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1076,6 +1076,23 @@ required_capability: agg_values [1955-01-21T00:00:00Z, 1957-05-23T00:00:00Z, 1959-12-03T00:00:00Z] | null ; + +mvAppendDates +required_capability: fn_mv_append + +FROM employees +| WHERE emp_no == 10039 OR emp_no == 10040 +| SORT emp_no +| EVAL dates = mv_append(birth_date, hire_date) +| KEEP emp_no, birth_date, hire_date, dates +; + +emp_no:integer | birth_date:date | hire_date:date | dates:date +10039 | 1959-10-01T00:00:00Z | 1988-01-19T00:00:00Z | [1959-10-01T00:00:00Z, 1988-01-19T00:00:00Z] +10040 | null | 1993-02-14T00:00:00Z | null +; + + implicitCastingNotEqual required_capability: string_literal_auto_casting from employees | where birth_date != "1957-05-23T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; @@ -1105,3 +1122,52 @@ emp_no:integer | birth_date:datetime 10003 | 1959-12-03T00:00:00Z 10008 | 1958-02-19T00:00:00Z ; + +implicitCastingArithmeticOperationAdd +required_capability: string_literal_auto_casting_to_datetime_add_sub +from employees +| eval a = 1 day + "2024-01-01", b = 1 year + "2024-04-01" + 1 month, c = "2024-01-01" + 3600 seconds, + d = "2024-04-01" + (1 year + 1 day) +| keep a, b, c, d +| limit 1 +; + +a:datetime | b:datetime | c:datetime | d:datetime 
+2024-01-02 | 2025-05-01 | 2024-01-01T01:00:00.000Z | 2025-04-02 +; + +implicitCastingArithmeticOperationSub +required_capability: string_literal_auto_casting_to_datetime_add_sub +from employees +| eval a = "2024-01-01" - 1 day, b = "2024-04-01" - 1 month, c = "2024-01-01" - 3600 seconds, + d = "2024-04-01" - (1 year + 1 day) +| keep a, b, c, d +| limit 1 +; + +a:datetime | b:datetime | c:datetime | d:datetime +2023-12-31 | 2024-03-01 | 2023-12-31T23:00:00.000Z | 2023-03-31 +; + +implicitCastingArithmeticOperationAddSub +required_capability: string_literal_auto_casting_to_datetime_add_sub +from employees +| eval a = 1 month + "2024-01-01" - 1 day, b = - 1 year + "2024-04-01" + 1 month, + c = 1 hour + "2024-01-01" - 3600 seconds, d = "2024-04-01" - (1 year + 1 day) +| keep a, b, c, d +| limit 1 +; + +a:datetime | b:datetime | c:datetime | d:datetime +2024-01-31 | 2023-05-01 | 2024-01-01 | 2023-03-31 +; + +temporalAmountWithNulls +from employees +| eval a = to_dt(null) - 1 day +| keep a +| limit 1; + +a:datetime +null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index bbe0df9a8cda9..66f4e9a33ceff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -523,6 +523,26 @@ required_capability: agg_values [1.7, 1.83, 2.05] | null ; + +mvAppend +required_capability: fn_mv_append + +FROM employees +| WHERE emp_no == 10008 OR emp_no == 10021 +| EVAL d = mv_append(salary_change, salary_change), + i = mv_append(salary_change.int, salary_change.int), + i2 = mv_append(emp_no, salary_change.int), + i3 = mv_append(emp_no, emp_no), + s = mv_append(salary_change.keyword, salary_change.keyword) +| KEEP emp_no, salary_change, d, i, i2, i3, s +| SORT emp_no; + +emp_no:integer | salary_change:double | d:double | i:integer | i2:integer | i3:integer | s:keyword +10008 | [-2.92,0.75,3.54,12.68] | [-2.92,0.75,3.54,12.68,-2.92,0.75,3.54,12.68] | [-2,0,3,12,-2,0,3,12] | [10008,-2,0,3,12] | [10008, 10008] | [-2.92,0.75,12.68,3.54,-2.92,0.75,12.68,3.54] +10021 | null | null | null | null | [10021, 10021] | null +; + + signumOfPositiveDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] row d = to_double(100) | eval s = signum(d); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec index f6dc28fa5db12..377d6d6678032 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec @@ -1,11 +1,259 @@ +keywordByInt +required_capability: lookup_command +FROM employees +| SORT emp_no +| LIMIT 4 +| RENAME languages AS int +| LOOKUP int_number_names ON int +| RENAME int AS languages, name AS lang_name +| KEEP emp_no, languages, lang_name +; + +emp_no:integer | languages:integer | lang_name:keyword + 10001 | 2 | two + 10002 | 5 | five + 10003 | 4 | four + 10004 | 5 | five +; + +keywordByMvInt +required_capability: lookup_command +ROW int=[1, 2, 3] +| LOOKUP int_number_names ON int +; + +int:integer | name:keyword + [1, 2, 3] | [one, two, three] +; + +keywordByDupeInt +required_capability: lookup_command +ROW int=[1, 1, 1] +| LOOKUP int_number_names ON int +; + +int:integer | name:keyword + [1, 1, 1] | [one, one, one] +; + +intByKeyword +required_capability: lookup_command +ROW name="two" +| LOOKUP 
int_number_names ON name +; + +name:keyword | int:integer + two | 2 +; + + +keywordByLong +required_capability: lookup_command +FROM employees +| SORT emp_no +| LIMIT 4 +| RENAME languages.long AS long +| LOOKUP long_number_names ON long +| RENAME long AS languages, name AS lang_name +| KEEP emp_no, languages, lang_name +; + +emp_no:integer | languages:long | lang_name:keyword + 10001 | 2 | two + 10002 | 5 | five + 10003 | 4 | four + 10004 | 5 | five +; + +longByKeyword +required_capability: lookup_command +ROW name="two" +| LOOKUP long_number_names ON name +; + +name:keyword | long:long + two | 2 +; + +keywordByFloat +required_capability: lookup_command +FROM employees +| SORT emp_no +| LIMIT 4 +| RENAME height AS double +| LOOKUP double_number_names ON double +| RENAME double AS height, name AS height_name +| KEEP emp_no, height, height_name +; + +emp_no:integer | height:double | height_name:keyword + 10001 | 2.03 | two point zero three + 10002 | 2.08 | two point zero eight + 10003 | 1.83 | null + 10004 | 1.78 | null +; + +floatByKeyword +required_capability: lookup_command +ROW name="two point zero eight" +| LOOKUP double_number_names ON name +; + + name:keyword | double:double +two point zero eight | 2.08 +; + +floatByNullMissing +required_capability: lookup_command +ROW name=null +| LOOKUP double_number_names ON name +; + +name:null | double:double + null | null +; +floatByNullMatching +required_capability: lookup_command +ROW name=null +| LOOKUP double_number_names_with_null ON name +; + +name:null | double:double + null | 0 +; + +intIntByKeywordKeyword +required_capability: lookup_command +ROW aa="foo", ab="zoo" +| LOOKUP big ON aa, ab +; + +aa:keyword | ab:keyword | na:integer | nb:integer +foo | zoo | 1 | -1 +; + +intIntByKeywordKeywordMissing +required_capability: lookup_command +ROW aa="foo", ab="zoi" +| LOOKUP big ON aa, ab +; + +aa:keyword | ab:keyword | na:integer | nb:integer +foo | zoi | null | null +; + +intIntByKeywordKeywordSameValues +required_capability: lookup_command +ROW aa="foo", ab="foo" +| LOOKUP big ON aa, ab +; + +aa:keyword | ab:keyword | na:integer | nb:integer +foo | foo | 2 | -2 +; + +intIntByKeywordKeywordSameValuesMissing +required_capability: lookup_command +ROW aa="bar", ab="bar" +| LOOKUP big ON aa, ab +; + +aa:keyword | ab:keyword | na:integer | nb:integer +bar | bar | null | null +; + +lookupBeforeStats +required_capability: lookup_command + FROM employees +| RENAME languages AS int +| LOOKUP int_number_names ON int +| RENAME name AS languages +| STATS height=ROUND(AVG(height), 3) BY languages +| SORT height ASC; + +height:double | languages:keyword + 1.694 | four + 1.732 | one + 1.762 | two + 1.764 | three + 1.809 | null + 1.847 | five +; + +lookupAfterStats +required_capability: lookup_command + FROM employees +| STATS int=TO_INT(AVG(height)) +| LOOKUP int_number_names ON int +| KEEP name; + +name:keyword +two +; + +// Makes sure the LOOKUP squashes previous names +doesNotDuplicateNames +required_capability: lookup_command +FROM employees +| SORT emp_no +| LIMIT 4 +| RENAME languages.long AS long +| EVAL name = CONCAT(first_name, " ", last_name) +| LOOKUP long_number_names ON long +| RENAME long AS languages +| KEEP emp_no, languages, name +; +emp_no:integer | languages:long | name:keyword + 10001 | 2 | two + 10002 | 5 | five + 10003 | 4 | four + 10004 | 5 | five +; + +lookupBeforeSort +required_capability: lookup_command +FROM employees +| WHERE emp_no < 10005 +| RENAME languages AS int +| LOOKUP int_number_names ON int +| RENAME name AS languages 
+| KEEP languages, emp_no +| SORT languages ASC, emp_no ASC +; + +languages:keyword | emp_no:integer + five | 10002 + five | 10004 + four | 10003 + two | 10001 +; + +lookupAfterSort +required_capability: lookup_command +FROM employees +| WHERE emp_no < 10005 +| SORT languages ASC, emp_no ASC +| RENAME languages AS int +| LOOKUP int_number_names ON int +| RENAME name AS languages +| KEEP languages, emp_no +; + +languages:keyword | emp_no:integer + two | 10001 + four | 10003 + five | 10002 + five | 10004 +; // // Make sure that the new LOOKUP syntax doesn't clash with any existing things // named "lookup" // rowNamedLookup +required_capability: lookup_command ROW lookup = "a" ; @@ -14,6 +262,7 @@ lookup:keyword ; rowNamedLOOKUP +required_capability: lookup_command ROW LOOKUP = "a" ; @@ -22,6 +271,7 @@ LOOKUP:keyword ; evalNamedLookup +required_capability: lookup_command ROW a = "a" | EVAL lookup = CONCAT(a, "1") ; @@ -30,6 +280,7 @@ a:keyword | lookup:keyword ; dissectNamedLookup +required_capability: lookup_command row a = "foo bar" | dissect a "foo %{lookup}"; a:keyword | lookup:keyword @@ -37,6 +288,7 @@ a:keyword | lookup:keyword ; renameIntoLookup +required_capability: lookup_command row a = "foo bar" | RENAME a AS lookup; lookup:keyword @@ -44,6 +296,7 @@ lookup:keyword ; sortOnLookup +required_capability: lookup_command ROW lookup = "a" | SORT lookup ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index eff4cb05bd8c0..2cdd5c1dfd931 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -14,7 +14,7 @@ synopsis:keyword "double cbrt(number:double|integer|long|unsigned_long)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" -"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text coalesce(first:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text, ?rest...:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text)" +"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version coalesce(first:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version, ?rest...:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version)" "keyword concat(string1:keyword|text, string2...:keyword|text)" "double cos(angle:double|integer|long|unsigned_long)" "double cosh(angle:double|integer|long|unsigned_long)" @@ -42,10 +42,11 @@ double e() "double|integer|long median(number:double|integer|long)" "double|integer|long median_absolute_deviation(number:double|integer|long)" "double|integer|long min(number:double|integer|long)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_append(field1:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, field2:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version)" "double mv_avg(number:double|integer|long|unsigned_long)" "keyword mv_concat(string:text|keyword, delim:text|keyword)" "integer 
mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version)" "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" @@ -59,6 +60,7 @@ date now() "double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" double pi() "double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" +"keyword repeat(string:keyword|text, number:integer)" "keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" "keyword right(string:keyword|text, length:integer)" "double|integer|long|unsigned_long round(number:double|integer|long|unsigned_long, ?decimals:integer)" @@ -129,7 +131,7 @@ case |[condition, trueValue] |[boolean, "boolean|cartesian cbrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text" |Expression to evaluate. +coalesce |first |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -157,10 +159,11 @@ max |number |"double|integer|long" median |number |"double|integer|long" |[""] median_absolut|number |"double|integer|long" |[""] min |number |"double|integer|long" |[""] +mv_append |[field1, field2] |["boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version", "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version"] | ["", ""] mv_avg |number |"double|integer|long|unsigned_long" |Multivalue expression. mv_concat |[string, delim] |["text|keyword", "text|keyword"] |[Multivalue expression., Delimiter.] 
mv_count |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. -mv_dedupe |field |"boolean|date|double|integer|ip|keyword|long|text|version" |Multivalue expression. +mv_dedupe |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |Multivalue expression. mv_first |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. mv_last |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. mv_max |field |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. @@ -174,6 +177,7 @@ now |null |null percentile |[number, percentile] |["double|integer|long", "double|integer|long"] |[, ] pi |null |null |null pow |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["Numeric expression for the base. If `null`\, the function returns `null`.", "Numeric expression for the exponent. If `null`\, the function returns `null`."] +repeat |[string, number] |["keyword|text", integer] |[String expression., Number of times to repeat.] replace |[string, regex, newString] |["keyword|text", "keyword|text", "keyword|text"] |[String expression., Regular expression., Replacement string.] right |[string, length] |["keyword|text", integer] |[The string from which to returns a substring., The number of characters to return.] round |[number, decimals] |["double|integer|long|unsigned_long", integer] |["The numeric value to round. If `null`\, the function returns `null`.", "The number of decimal places to round to. Defaults to 0. If `null`\, the function returns `null`."] @@ -273,6 +277,7 @@ max |The maximum value of a numeric field. median |The value that is greater than half of all values and less than half of all values. median_absolut|The median absolute deviation, a measure of variability. min |The minimum value of a numeric field. +mv_append |Concatenates values of two multi-value fields. mv_avg |Converts a multivalued field into a single valued field containing the average of all of the values. mv_concat |Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter. mv_count |Converts a multivalued expression into a single valued column containing a count of the number of values. @@ -290,6 +295,7 @@ now |Returns current date and time. percentile |The value at which a certain percentage of observed values occur. pi |Returns {wikipedia}/Pi[Pi], the ratio of a circle's circumference to its diameter. pow |Returns the value of `base` raised to the power of `exponent`. +repeat |Returns a string constructed by concatenating `string` with itself the specified `number` of times. replace |The function substitutes in the string `str` any match of the regular expression `regex` with the replacement string `newStr`. right |Return the substring that extracts 'length' chars from 'str' starting from the right. round |Rounds a number to the specified number of decimal places. Defaults to 0, which returns the nearest integer. If the precision is a negative number, rounds to the number of digits left of the decimal point.
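
The `table(...)` helper added earlier in this patch builds a `LinkedHashMap` instead of calling `Map.of` because, as its javadoc notes, the lookup fixtures depend on a stable column order, and `Map.of` makes no iteration-order guarantee. A minimal standalone sketch of that ordering point — plain JDK types only, with the fixture's `Column` value type stubbed as `String` for illustration — could look like:

import java.util.LinkedHashMap;
import java.util.Map;

public class ColumnOrderSketch {
    // Mirrors the shape of the table(...) helper: varargs entries, insertion order kept.
    @SafeVarargs
    static Map<String, String> table(Map.Entry<String, String>... kv) {
        Map<String, String> table = new LinkedHashMap<>();
        for (Map.Entry<String, String> e : kv) {
            table.put(e.getKey(), e.getValue());
        }
        return table;
    }

    public static void main(String[] args) {
        // LinkedHashMap iterates in insertion order: prints [int, name] every run.
        System.out.println(table(Map.entry("int", "INTEGER"), Map.entry("name", "KEYWORD")).keySet());
        // Map.of leaves iteration order unspecified: the column layout could differ between runs.
        System.out.println(Map.of("int", "INTEGER", "name", "KEYWORD").keySet());
    }
}

Since the csv-spec expectations above ("int_number_names", "big", and so on) list columns in a fixed left-to-right order, an order-preserving map keeps those fixtures deterministic.
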
@@ -362,7 +368,7 @@ case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword cbrt |double |false |false |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false -coalesce |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text" |false |true |false +coalesce |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |false |true |false concat |keyword |[false, false] |true |false cos |double |false |false |false cosh |double |false |false |false @@ -390,10 +396,11 @@ max |"double|integer|long" median |"double|integer|long" |false |false |true median_absolut|"double|integer|long" |false |false |true min |"double|integer|long" |false |false |true +mv_append |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |[false, false] |false |false mv_avg |double |false |false |false mv_concat |keyword |[false, false] |false |false mv_count |integer |false |false |false -mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |false +mv_dedupe |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |false |false |false mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |false |false |false @@ -407,6 +414,7 @@ now |date percentile |"double|integer|long" |[false, false] |false |true pi |double |null |false |false pow |double |[false, false] |false |false +repeat |keyword |[false, false] |false |false replace |keyword |[false, false, false] |false |false right |keyword |[false, false] |false |false round |"double|integer|long|unsigned_long" |[false, true] |false |false @@ -475,5 +483,5 @@ countFunctions#[skip:-8.14.99, reason:BIN added] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -107 | 107 | 107 +109 | 109 | 109 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 13616e5146949..53d7d1fd0d352 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -839,6 +839,41 @@ emp_no:integer | full_name:keyword | full_name_2:keyword | job_positions:keyword 10005 | Kyoichi Maliniak | Maliniak,Kyoichi | null | [-2.14,13.07] | [-2.14,13.07] ; +mvZipLiteralNullDelim +required_capability: mv_sort + +FROM employees +| EVAL full_name = mv_zip(first_name, last_name, null) +| KEEP emp_no, full_name +| SORT emp_no +| LIMIT 5; + +emp_no:integer | full_name:keyword +10001 | null +10002 | null +10003 | null +10004 | null +10005 | null +; + +mvZipLiteralLongDelim +required_capability: mv_sort + +FROM employees +| EVAL full_name = mv_zip(first_name, last_name, " words words words ") +| KEEP emp_no, full_name +| SORT emp_no +| LIMIT 5; + +emp_no:integer | full_name:keyword +10001 | Georgi words words words Facello +10002 | Bezalel words words words Simmel +10003 | Parto words words words Bamford +10004 | Chirstian words words 
words Koblick +10005 | Kyoichi words words words Maliniak +; + + showTextFields from hosts | sort description, card, ip0, ip1 | where host == "beta" | keep host, host_group, description; ignoreOrder:true @@ -1335,6 +1370,79 @@ l1:integer | l2:integer null | 0 ; + +mvAppend +required_capability: fn_mv_append + +ROW a = "a", b = ["b", "c"], n = null +| EVAL aa = mv_append(a, a), bb = mv_append(b, b), ab = mv_append(a, b), abb = mv_append(mv_append(a, b), b), na = mv_append(n, a), an = mv_append(a, n) +; + +a:keyword | b:keyword | n:null | aa:keyword | bb:keyword | ab:keyword | abb:keyword | na:keyword | an:keyword +a | [b, c] | null |[a, a] | [b, c, b, c] | [a, b, c] | [a, b, c, b, c] | null | null +; + + +mvAppendNull +required_capability: fn_mv_append + +ROW a = "a", b = ["b", "c"], c = to_string(null) +| EVAL a_null = mv_append(a, c), + null_a = mv_append(c, a), + b_null = mv_append(b, c), + null_b = mv_append(c, b), + null_null = mv_append(c, c) +; + +a:keyword | b:keyword | c:keyword | a_null:keyword | null_a:keyword | b_null:keyword | null_b:keyword | null_null:keyword +a | [b, c] | null | null | null | null | null | null +; + + +mvAppendStrings +required_capability: fn_mv_append + +FROM employees +| WHERE emp_no == 10004 +| EVAL names = mv_sort(mv_append(first_name, last_name)), + two_jobs = mv_sort(mv_append(job_positions, job_positions)), + three_jobs = mv_sort(mv_append(job_positions, mv_append(job_positions, job_positions))) +| KEEP emp_no, names, two_jobs, three_jobs +; + +emp_no:integer | names:keyword | two_jobs:keyword | three_jobs:keyword +10004 | ["Chirstian", "Koblick"] | ["Head Human Resources","Head Human Resources","Reporting Analyst","Reporting Analyst","Support Engineer","Support Engineer","Tech Lead","Tech Lead"] | ["Head Human Resources","Head Human Resources","Head Human Resources","Reporting Analyst","Reporting Analyst","Reporting Analyst","Support Engineer","Support Engineer","Support Engineer","Tech Lead","Tech Lead","Tech Lead"] +; + + + +mvAppendStringsWhere +required_capability: fn_mv_append + +FROM employees +| EVAL two_jobs = mv_append(mv_sort(job_positions), mv_sort(job_positions)) +| WHERE emp_no == 10004 AND mv_slice(mv_append(mv_sort(job_positions), mv_sort(job_positions)), 6, 6) == "Support Engineer" +| KEEP emp_no, two_jobs +; + +emp_no:integer | two_jobs:keyword +10004 | ["Head Human Resources","Reporting Analyst","Support Engineer","Tech Lead","Head Human Resources","Reporting Analyst","Support Engineer","Tech Lead"] +; + +mvAppendNullFields +required_capability: fn_mv_append + +FROM employees +| WHERE emp_no == 10005 +| EVAL x = mv_append(first_name, job_positions), y = mv_append(job_positions, first_name), z = mv_append(job_positions, job_positions) +| keep emp_no, first_name, job_positions, x, y, z +; + +emp_no:integer | first_name:keyword | job_positions:keyword | x:keyword | y:keyword | z:keyword +10005 | Kyoichi | null | null | null | null +; + + base64Encode#[skip:-8.13.99,reason:new base64 function added in 8.14] required_capability: base64_decode_encode @@ -1381,3 +1489,108 @@ emp_no:integer | first_name:keyword | e:keyword | d:keyword 10030 | null | null | null 10031 | null | null | null ; + +repeat +required_capability: repeat +// tag::repeat[] +ROW a = "Hello!" +| EVAL triple_a = REPEAT(a, 3); +// end::repeat[] + +// tag::repeat-result[] +a:keyword | triple_a:keyword +Hello! | Hello!Hello!Hello! +// end::repeat-result[] +; + +repeatUtf16Emoji +required_capability: repeat +row a = "🐱Meow!🐶Woof!" 
| eval repeated = repeat(a, 2); + +a:keyword | repeated:keyword +🐱Meow!🐶Woof! | 🐱Meow!🐶Woof!🐱Meow!🐶Woof! +; + +repeatLength +required_capability: repeat +row a = "cheese" | eval repeated_len = length(repeat(a, 5)); + +a:keyword | repeated_len:integer +cheese | 30 +; + +repeatSubstring +required_capability: repeat +row a = "catcat" | eval repeated = repeat(substring(a, 4), 2); + +a:keyword | repeated:keyword +catcat | catcat +; + +repeatZero +required_capability: repeat +row a = "cheese" | eval repeated = repeat(a, 0); + +a:keyword | repeated:keyword +cheese | "" +; + +repeatNegative +required_capability: repeat + +from employees | sort emp_no | limit 1 | eval repeated = repeat(first_name, emp_no - 10002) | keep first_name, repeated; + +warning:Line 1:58: evaluation of [repeat(first_name, emp_no - 10002)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:58: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [-1] + +first_name:keyword | repeated:keyword +Georgi | null +; + +repeatVariableNumber +required_capability: repeat +from employees +| sort emp_no +| limit 4 +| eval first_repeated = repeat(first_name, emp_no - 10001) +| keep emp_no, first_name, first_repeated; + +emp_no:integer | first_name:keyword | first_repeated:keyword +10001 | Georgi | "" +10002 | Bezalel | Bezalel +10003 | Parto | PartoParto +10004 | Chirstian | ChirstianChirstianChirstian +; + +repeatStringNull +required_capability: repeat +row n = 2 | eval repeated = repeat(null, n); + +n:integer | repeated:keyword +2 | null +; + +repeatNumberNull +required_capability: repeat +row s = "cheese" | eval repeated = repeat(s, null); + +s:keyword | repeated:keyword +cheese | null +; + +repeatBothArgsFromIndex +required_capability: repeat +FROM employees +| EVAL first_name=REPEAT(first_name, languages) +| KEEP emp_no, languages, first_name +| WHERE emp_no < 10005 +| SORT emp_no; + +emp_no:integer | languages:integer | first_name:keyword +10001 | 2 | GeorgiGeorgi +10002 | 5 | BezalelBezalelBezalelBezalelBezalel +10003 | 4 | PartoPartoPartoParto +10004 | 5 | ChirstianChirstianChirstianChirstianChirstian +; + + diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index 3b6c41f883018..eb0d6d75a7d07 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -371,6 +371,21 @@ version:version | name:keyword 5.2.9 | mmmmm ; + +mvAppend +required_capability: fn_mv_append + +ROW a = to_version("1.2.0"), x1 = to_version("0.0.1"), x2 = to_version("1.0.0") +| EVAL b = mv_append(x1, x2) +| EVAL aa = mv_append(a, a), bb = mv_append(b, b), ab = mv_append(a, b), abb = mv_append(mv_append(a, b), b) +| KEEP a, b, aa, bb, ab, abb +; + +a:version | b:version | aa:version | bb:version | ab:version | abb:version +1.2.0 | [0.0.1, 1.0.0] | [1.2.0, 1.2.0] | [0.0.1, 1.0.0, 0.0.1, 1.0.0] | [1.2.0, 0.0.1, 1.0.0] | [1.2.0, 0.0.1, 1.0.0, 0.0.1, 1.0.0] +; + + implictCastingEqual required_capability: string_literal_auto_casting_extended from apps | where version == "1.2.3.4" | sort name | keep name, version; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index 8479e2c073d82..76e09389c7ad7 
100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -30,9 +30,9 @@ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { public NotEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public BooleanBlock eval(int positionCount, BooleanBlock vBlock) { } public BooleanVector eval(int positionCount, BooleanVector vVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(Not.process(vVector.getBoolean(p))); + result.appendBoolean(p, Not.process(vVector.getBoolean(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index e136926ea1f5d..4c8988bbf6034 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -32,9 +32,9 @@ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEv public GreatestBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { } public BooleanVector eval(int positionCount, BooleanVector[] valuesVectors) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getBoolean(p); } - result.appendBoolean(Greatest.process(valuesValues)); + result.appendBoolean(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index bff5eb17aa98c..0879c62ecafa6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -33,9 +33,9 @@ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionE public GreatestBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index c82e076e6d807..20121bd3727af 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -32,9 +32,9 @@ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEva public GreatestDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { } public DoubleVector eval(int positionCount, DoubleVector[] valuesVectors) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getDouble(p); } - result.appendDouble(Greatest.process(valuesValues)); + result.appendDouble(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 28dbdd4218c1b..85268a83b159e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -32,9 +32,9 @@ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvalua public GreatestIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { } public IntVector eval(int positionCount, IntVector[] valuesVectors) { - try(IntVector.Builder result = 
driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getInt(p); } - result.appendInt(Greatest.process(valuesValues)); + result.appendInt(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 8e907c20742ed..98e45ea0fe7b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -32,9 +32,9 @@ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvalu public GreatestLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { } public LongVector eval(int positionCount, LongVector[] valuesVectors) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getLong(p); } - result.appendLong(Greatest.process(valuesValues)); + result.appendLong(p, Greatest.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index f99f385ba616b..2dce335fc442d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -32,9 +32,9 @@ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvalu public LeastBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { } public BooleanVector eval(int positionCount, BooleanVector[] valuesVectors) { - try(BooleanVector.Builder result = 
driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getBoolean(p); } - result.appendBoolean(Least.process(valuesValues)); + result.appendBoolean(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index 23d03102eed5e..c701da21de514 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -33,9 +33,9 @@ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEval public LeastBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index 4dae6fe78041d..eb605876045f8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -32,9 +32,9 @@ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvalua public LeastDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { } public DoubleVector eval(int positionCount, DoubleVector[] valuesVectors) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getDouble(p); } - result.appendDouble(Least.process(valuesValues)); + result.appendDouble(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index a6a1cac60146f..3a69293b66cff 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -32,9 +32,9 @@ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator public LeastIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { } public IntVector eval(int positionCount, IntVector[] valuesVectors) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getInt(p); } - result.appendInt(Least.process(valuesValues)); + result.appendInt(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index e8416095d46b2..00494374236ec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -32,9 +32,9 @@ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluato public LeastLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -84,14 +84,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { } public LongVector eval(int positionCount, LongVector[] valuesVectors) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { // unpack valuesVectors into valuesValues for (int i = 0; i < valuesVectors.length; i++) { valuesValues[i] = valuesVectors[i].getLong(p); } - result.appendLong(Least.process(valuesValues)); + result.appendLong(p, Least.process(valuesValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java index fff0acc5009f2..f4704dc7c7e27 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java @@ -35,10 +35,10 @@ public final class FromBase64Evaluator implements EvalOperator.ExpressionEvaluat public FromBase64Evaluator(Source source, EvalOperator.ExpressionEvaluator field, BytesRefBuilder oScratch, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.oScratch = oScratch; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java index 2bf0df5caeecd..eb0c483c7485d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java @@ -36,10 +36,10 @@ public final class ToBase64Evaluator implements EvalOperator.ExpressionEvaluator public ToBase64Evaluator(Source source, EvalOperator.ExpressionEvaluator field, BytesRefBuilder oScratch, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.oScratch = oScratch; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java index 6543e1e9bcb29..fe54f8f5f9e12 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java @@ -37,11 +37,11 @@ public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionE public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, EvalOperator.ExpressionEvaluator startTimestamp, EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.warnings = new Warnings(source); this.datePartFieldUnit = datePartFieldUnit; this.startTimestamp = startTimestamp; this.endTimestamp = endTimestamp; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java index 7d7874c1beb99..dbb13c2d422dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java @@ -40,11 +40,11 @@ public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, EvalOperator.ExpressionEvaluator startTimestamp, EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.warnings = new Warnings(source); this.unit = unit; this.startTimestamp = startTimestamp; this.endTimestamp = endTimestamp; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index 4f63a5579474e..abff711e5c19a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -36,11 +36,11 @@ public final class DateExtractConstantEvaluator implements EvalOperator.Expressi public DateExtractConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, ChronoField chronoField, ZoneId zone, DriverContext driverContext) { - this.warnings = new Warnings(source); this.value = value; this.chronoField = chronoField; this.zone = zone; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -75,9 +75,9 @@ public LongBlock eval(int positionCount, LongBlock valueBlock) { } public LongVector eval(int positionCount, LongVector valueVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(DateExtract.process(valueVector.getLong(p), chronoField, zone)); + result.appendLong(p, DateExtract.process(valueVector.getLong(p), chronoField, zone)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index dafe99859f77a..e2c77cd2718c4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -38,11 +38,11 @@ public final class DateExtractEvaluator implements EvalOperator.ExpressionEvalua public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator value, EvalOperator.ExpressionEvaluator chronoField, ZoneId zone, DriverContext driverContext) { - this.warnings = new Warnings(source); this.value = value; this.chronoField = chronoField; this.zone = zone; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index 55170fc19d05b..770230e3a5a71 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -35,10 +35,10 @@ public final class DateFormatConstantEvaluator implements EvalOperator.Expressio public DateFormatConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DateFormatter formatter, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 9b2df06b61ceb..0ac3f5c327169 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -38,11 +38,11 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat public DateFormatEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.locale = locale; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 28ef039cf72c6..c08c1a54f90ba 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -35,10 +35,10 @@ public final class DateParseConstantEvaluator implements EvalOperator.Expression public DateParseConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DateFormatter formatter, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 4f1018b446587..a28a3feb1c9b6 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -37,11 +37,11 @@ public final class DateParseEvaluator implements EvalOperator.ExpressionEvaluato public DateParseEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator formatter, ZoneId zoneId, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.zoneId = zoneId; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index 5630f963eac2b..b72203ce0de35 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -33,10 +33,10 @@ public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluato public DateTruncEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding, DriverContext driverContext) { - this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.rounding = rounding; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -71,9 +71,9 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { } public LongVector eval(int positionCount, LongVector fieldValVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(DateTrunc.process(fieldValVector.getLong(p), rounding)); + result.appendLong(p, DateTrunc.process(fieldValVector.getLong(p), rounding)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index 51e00309f4450..1894d19d7b082 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -26,9 +26,9 @@ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; public NowEvaluator(Source source, long now, DriverContext driverContext) { - this.warnings = new Warnings(source); this.now = now; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -37,9 +37,9 @@ public Block eval(Page page) { } public LongVector eval(int positionCount) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + 
try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Now.process(now)); + result.appendLong(p, Now.process(now)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index 0f91568f36430..8782e547c3831 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -37,10 +37,10 @@ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluato public CIDRMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator ip, EvalOperator.ExpressionEvaluator[] cidrs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.ip = ip; this.cidrs = cidrs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -113,7 +113,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock ipBlock, BytesRefBlock public BooleanVector eval(int positionCount, BytesRefVector ipVector, BytesRefVector[] cidrsVectors) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef ipScratch = new BytesRef(); BytesRef[] cidrsValues = new BytesRef[cidrs.length]; BytesRef[] cidrsScratch = new BytesRef[cidrs.length]; @@ -125,7 +125,7 @@ public BooleanVector eval(int positionCount, BytesRefVector ipVector, for (int i = 0; i < cidrsVectors.length; i++) { cidrsValues[i] = cidrsVectors[i].getBytesRef(p, cidrsScratch[i]); } - result.appendBoolean(CIDRMatch.process(ipVector.getBytesRef(p, ipScratch), cidrsValues)); + result.appendBoolean(p, CIDRMatch.process(ipVector.getBytesRef(p, ipScratch), cidrsValues)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java index 174df48d5ce62..57427f87b76f7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixEvaluator.java @@ -42,12 +42,12 @@ public IpPrefixEvaluator(Source source, EvalOperator.ExpressionEvaluator ip, EvalOperator.ExpressionEvaluator prefixLengthV4, EvalOperator.ExpressionEvaluator prefixLengthV6, BytesRef scratch, DriverContext driverContext) { - this.warnings = new Warnings(source); this.ip = ip; this.prefixLengthV4 = prefixLengthV4; this.prefixLengthV6 = prefixLengthV6; this.scratch = scratch; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index 917f2efac561d..330ee39d49907 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -30,9 +30,9 @@ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluato public AbsDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { - this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) { } public DoubleVector eval(int positionCount, DoubleVector fieldValVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Abs.process(fieldValVector.getDouble(p))); + result.appendDouble(p, Abs.process(fieldValVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index fa8a17266cbca..c453fbd08267c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -30,9 +30,9 @@ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { public AbsIntEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { - this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public IntBlock eval(int positionCount, IntBlock fieldValBlock) { } public IntVector eval(int positionCount, IntVector fieldValVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Abs.process(fieldValVector.getInt(p))); + result.appendInt(p, Abs.process(fieldValVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index 0a8f122176f05..3e75e955b2580 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -30,9 +30,9 @@ public final class 
AbsLongEvaluator implements EvalOperator.ExpressionEvaluator public AbsLongEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { - this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public LongBlock eval(int positionCount, LongBlock fieldValBlock) { } public LongVector eval(int positionCount, LongVector fieldValVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Abs.process(fieldValVector.getLong(p))); + result.appendLong(p, Abs.process(fieldValVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index 1d7d18ff1e437..840483e754b43 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -31,9 +31,9 @@ public final class AcosEvaluator implements EvalOperator.ExpressionEvaluator { public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index c5d116c3bce86..ed78b6fa29733 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -31,9 +31,9 @@ public final class AsinEvaluator implements EvalOperator.ExpressionEvaluator { public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index c71612a98d7eb..0c3bb49333363 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -32,10 +32,10 @@ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { public Atan2Evaluator(Source source, EvalOperator.ExpressionEvaluator y, EvalOperator.ExpressionEvaluator 
x, DriverContext driverContext) { - this.warnings = new Warnings(source); this.y = y; this.x = x; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -87,9 +87,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock yBlock, DoubleBlock xBloc } public DoubleVector eval(int positionCount, DoubleVector yVector, DoubleVector xVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Atan2.process(yVector.getDouble(p), xVector.getDouble(p))); + result.appendDouble(p, Atan2.process(yVector.getDouble(p), xVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index fd6bbfd2b8a30..0902d138620ad 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -30,9 +30,9 @@ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { public AtanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Atan.process(valVector.getDouble(p))); + result.appendDouble(p, Atan.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index ccd453db17dfb..12e074f1d5049 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -32,9 +32,9 @@ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEv public CastIntToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, IntBlock vBlock) { } public DoubleVector eval(int positionCount, IntVector vVector) { - try(DoubleVector.Builder 
result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cast.castIntToDouble(vVector.getInt(p))); + result.appendDouble(p, Cast.castIntToDouble(vVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index b2f965d9862e5..29e28c305a167 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -32,9 +32,9 @@ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEval public CastIntToLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public LongBlock eval(int positionCount, IntBlock vBlock) { } public LongVector eval(int positionCount, IntVector vVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castIntToLong(vVector.getInt(p))); + result.appendLong(p, Cast.castIntToLong(vVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index 723acd3818867..61d19f02c4cb6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -32,9 +32,9 @@ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.Expres public CastIntToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public LongBlock eval(int positionCount, IntBlock vBlock) { } public LongVector eval(int positionCount, IntVector vVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castIntToUnsignedLong(vVector.getInt(p))); + result.appendLong(p, Cast.castIntToUnsignedLong(vVector.getInt(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 3a18e2f3d6c78..fdfc31b471d8d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -32,9 +32,9 @@ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionE public CastLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock vBlock) { } public DoubleVector eval(int positionCount, LongVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cast.castLongToDouble(vVector.getLong(p))); + result.appendDouble(p, Cast.castLongToDouble(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index 4b4b30ff5452d..4198062c2ecf5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -30,9 +30,9 @@ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.Expre public CastLongToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public LongBlock eval(int positionCount, LongBlock vBlock) { } public LongVector eval(int positionCount, LongVector vVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Cast.castLongToUnsignedLong(vVector.getLong(p))); + result.appendLong(p, Cast.castLongToUnsignedLong(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index f5cf77cde426d..3ae66262f9b0b 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -32,9 +32,9 @@ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.Exp public CastUnsignedLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { - this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock vBlock) { } public DoubleVector eval(int positionCount, LongVector vVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cast.castUnsignedLongToDouble(vVector.getLong(p))); + result.appendDouble(p, Cast.castUnsignedLongToDouble(vVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java index adb850f6dc57d..e34ea2a314b1c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtDoubleEvaluator.java @@ -31,9 +31,9 @@ public final class CbrtDoubleEvaluator implements EvalOperator.ExpressionEvaluat public CbrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java index 8d1fb71f79a06..fb815f9f01e33 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtIntEvaluator.java @@ -32,9 +32,9 @@ public final class CbrtIntEvaluator implements EvalOperator.ExpressionEvaluator public CbrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java index 766fe75bf8288..56a7af30bcfd0 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtLongEvaluator.java @@ -32,9 +32,9 @@ public final class CbrtLongEvaluator implements EvalOperator.ExpressionEvaluator public CbrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java index b3b0567a058e8..843d8f0d58c3a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtUnsignedLongEvaluator.java @@ -32,9 +32,9 @@ public final class CbrtUnsignedLongEvaluator implements EvalOperator.ExpressionE public CbrtUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock valBlock) { } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cbrt.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Cbrt.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index 30882e1c0777e..6ee809c683f73 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -30,9 +30,9 @@ public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluat public CeilDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = 
driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Ceil.process(valVector.getDouble(p))); + result.appendDouble(p, Ceil.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index d92c19a9637f0..7d2833dc025dd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -30,9 +30,9 @@ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { public CosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Cos.process(valVector.getDouble(p))); + result.appendDouble(p, Cos.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 1c113bd9655ae..211d801b75fd8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -31,9 +31,9 @@ public final class CoshEvaluator implements EvalOperator.ExpressionEvaluator { public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index 867c43951818a..fb3bbb34bf72f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -30,9 +30,9 @@ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvalua public FloorDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; 
this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Floor.process(valVector.getDouble(p))); + result.appendDouble(p, Floor.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 8fa1e7218ac61..0a4d7a3ad6d2c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -31,9 +31,9 @@ public final class Log10DoubleEvaluator implements EvalOperator.ExpressionEvalua public Log10DoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index 417074d0a47b2..147e2052af998 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -32,9 +32,9 @@ public final class Log10IntEvaluator implements EvalOperator.ExpressionEvaluator public Log10IntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index 1134de7f76b0b..565f286dcc8cd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -32,9 +32,9 @@ public final class Log10LongEvaluator implements EvalOperator.ExpressionEvaluato public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = 
Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 6f5f036e29a62..a900585fb6ef2 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -32,9 +32,9 @@ public final class Log10UnsignedLongEvaluator implements EvalOperator.Expression public Log10UnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java index 82d34296a64d9..3688e989a45d4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java @@ -31,9 +31,9 @@ public final class LogConstantEvaluator implements EvalOperator.ExpressionEvalua public LogConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, DriverContext driverContext) { - this.warnings = new Warnings(source); this.value = value; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java index 9ef5cf8d9580c..c2ccd4d64ec81 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java @@ -33,10 +33,10 @@ public final class LogEvaluator implements EvalOperator.ExpressionEvaluator { public LogEvaluator(Source source, EvalOperator.ExpressionEvaluator base, EvalOperator.ExpressionEvaluator value, DriverContext driverContext) { - this.warnings = new Warnings(source); this.base = base; this.value = value; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java index 2721150e49009..7d85b12e50085 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java @@ -33,10 +33,10 @@ public final class PowEvaluator implements EvalOperator.ExpressionEvaluator { public PowEvaluator(Source source, EvalOperator.ExpressionEvaluator base, EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { - this.warnings = new Warnings(source); this.base = base; this.exponent = exponent; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index 07d165f9b102c..1fe09cdd7079c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -34,10 +34,10 @@ public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvalua public RoundDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -89,9 +89,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock, LongBlock decim } public DoubleVector eval(int positionCount, DoubleVector valVector, LongVector decimalsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Round.process(valVector.getDouble(p), decimalsVector.getLong(p))); + result.appendDouble(p, Round.process(valVector.getDouble(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index cf7300ce8aea8..c9b3c778139c8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -30,9 +30,9 @@ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.Expres public RoundDoubleNoDecimalsEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = 
driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Round.process(valVector.getDouble(p))); + result.appendDouble(p, Round.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index 0ea8111d392c5..75886d8fb5ac6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -34,10 +34,10 @@ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator public RoundIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -89,9 +89,9 @@ public IntBlock eval(int positionCount, IntBlock valBlock, LongBlock decimalsBlo } public IntVector eval(int positionCount, IntVector valVector, LongVector decimalsVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Round.process(valVector.getInt(p), decimalsVector.getLong(p))); + result.appendInt(p, Round.process(valVector.getInt(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index 9f862a4e7289b..3c37fab209a46 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -32,10 +32,10 @@ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluato public RoundLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -87,9 +87,9 @@ public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsB } public LongVector eval(int positionCount, LongVector valVector, LongVector decimalsVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - 
result.appendLong(Round.process(valVector.getLong(p), decimalsVector.getLong(p))); + result.appendLong(p, Round.process(valVector.getLong(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 3d07675692f8d..2826feeea29b4 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -32,10 +32,10 @@ public final class RoundUnsignedLongEvaluator implements EvalOperator.Expression public RoundUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -87,9 +87,9 @@ public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsB } public LongVector eval(int positionCount, LongVector valVector, LongVector decimalsVector) { - try(LongVector.Builder result = driverContext.blockFactory().newLongVectorBuilder(positionCount)) { + try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); + result.appendLong(p, Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java index 4fbac4ce96392..c1f184afc5889 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java @@ -30,9 +30,9 @@ public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvalu public SignumDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getDouble(p))); + result.appendDouble(p, Signum.process(valVector.getDouble(p))); } return result.build(); } diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java index 122779e1d5e65..68b603cd98a0f 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java @@ -32,9 +32,9 @@ public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluato public SignumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, IntBlock valBlock) { } public DoubleVector eval(int positionCount, IntVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getInt(p))); + result.appendDouble(p, Signum.process(valVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java index aaae4eb6ba1ca..b66532789a57d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java @@ -32,9 +32,9 @@ public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluat public SignumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock valBlock) { } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.process(valVector.getLong(p))); + result.appendDouble(p, Signum.process(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java index 4070836e89c19..2fa03ed2cf444 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java @@ -32,9 +32,9 @@ public final class SignumUnsignedLongEvaluator implements EvalOperator.Expressio public SignumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock valBlock) { } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Signum.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Signum.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index 7e7484b3f47c3..23df0d539b630 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -30,9 +30,9 @@ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { public SinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sin.process(valVector.getDouble(p))); + result.appendDouble(p, Sin.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index 6850b2e151950..1aecf68eec110 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -31,9 +31,9 @@ public final class SinhEvaluator implements EvalOperator.ExpressionEvaluator { public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), 
source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 60d735cdf6a2a..cb1c5da5b0cf0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -31,9 +31,9 @@ public final class SqrtDoubleEvaluator implements EvalOperator.ExpressionEvaluat public SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index 0e13726e3a4bc..26fa9f39e7059 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -32,9 +32,9 @@ public final class SqrtIntEvaluator implements EvalOperator.ExpressionEvaluator public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 5695b23eba72b..7d306d76cd791 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -32,9 +32,9 @@ public final class SqrtLongEvaluator implements EvalOperator.ExpressionEvaluator public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index fd60cbb419413..eba1d041e6738 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -32,9 +32,9 @@ public final class SqrtUnsignedLongEvaluator implements 
EvalOperator.ExpressionE public SqrtUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -69,9 +69,9 @@ public DoubleBlock eval(int positionCount, LongBlock valBlock) { } public DoubleVector eval(int positionCount, LongVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Sqrt.processUnsignedLong(valVector.getLong(p))); + result.appendDouble(p, Sqrt.processUnsignedLong(valVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 78214b1c0d320..de602995cc328 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -30,9 +30,9 @@ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { public TanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Tan.process(valVector.getDouble(p))); + result.appendDouble(p, Tan.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index 1acd2b47174cf..80a1448820cc3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -30,9 +30,9 @@ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { public TanhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { } public DoubleVector eval(int positionCount, DoubleVector valVector) { - try(DoubleVector.Builder result = 
driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Tanh.process(valVector.getDouble(p))); + result.appendDouble(p, Tanh.process(valVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java new file mode 100644 index 0000000000000..83e49464fc43a --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBooleanEvaluator.java @@ -0,0 +1,102 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}. + * This class is generated. Do not edit it. 
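Alongside the Warnings change, the vector fast path in these evaluators switches from a growable DoubleVector.Builder to a DoubleVector.FixedBuilder: on the all-non-null path the result has exactly one value per position, so the builder can be allocated at its final size and each value written at an explicit position index (appendDouble(p, ...)). A standalone sketch of the difference, using hypothetical plain-array builders rather than the real compute-engine types:

    // Illustrative stand-ins only, not DoubleVector.Builder / DoubleVector.FixedBuilder.
    final class GrowableDoubleBuilder {
        private double[] values = new double[8];
        private int size = 0;
        GrowableDoubleBuilder appendDouble(double v) { // may reallocate as it grows
            if (size == values.length) values = java.util.Arrays.copyOf(values, size * 2);
            values[size++] = v;
            return this;
        }
        double[] build() { return java.util.Arrays.copyOf(values, size); }
    }

    final class FixedDoubleBuilder {
        private final double[] values;
        FixedDoubleBuilder(int positionCount) { values = new double[positionCount]; } // exact-size allocation
        FixedDoubleBuilder appendDouble(int position, double v) { // positional write, no growth checks
            values[position] = v;
            return this;
        }
        double[] build() { return values; } // no trailing copy needed
    }

The fixed variant avoids the growth checks and the copy at build time, which is presumably what the generator is after on this per-position hot loop.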
+ */ +public final class MvAppendBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field1; + + private final EvalOperator.ExpressionEvaluator field2; + + private final DriverContext driverContext; + + public MvAppendBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator field1, + EvalOperator.ExpressionEvaluator field2, DriverContext driverContext) { + this.field1 = field1; + this.field2 = field2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (BooleanBlock field1Block = (BooleanBlock) field1.eval(page)) { + try (BooleanBlock field2Block = (BooleanBlock) field2.eval(page)) { + return eval(page.getPositionCount(), field1Block, field2Block); + } + } + } + + public BooleanBlock eval(int positionCount, BooleanBlock field1Block, BooleanBlock field2Block) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + boolean allBlocksAreNulls = true; + if (!field1Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (!field2Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (allBlocksAreNulls) { + result.appendNull(); + continue position; + } + MvAppend.process(result, p, field1Block, field2Block); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MvAppendBooleanEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field1, field2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field1; + + private final EvalOperator.ExpressionEvaluator.Factory field2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field1, + EvalOperator.ExpressionEvaluator.Factory field2) { + this.source = source; + this.field1 = field1; + this.field2 = field2; + } + + @Override + public MvAppendBooleanEvaluator get(DriverContext context) { + return new MvAppendBooleanEvaluator(source, field1.get(context), field2.get(context), context); + } + + @Override + public String toString() { + return "MvAppendBooleanEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java new file mode 100644 index 0000000000000..6baea4de982ff --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendBytesRefEvaluator.java @@ -0,0 +1,103 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
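Each of the new MvAppend evaluators follows the same generated template visible above: evaluate both child expressions to typed blocks, emit null only when every input block is null at a position, and otherwise hand the position to MvAppend.process, which writes the combined multivalue into the builder. A simplified stand-in for that loop, with List<Object> playing the role of a block (a null entry meaning the position is null) and the per-position combine left pluggable, since what process does with a one-sided null lives in MvAppend itself rather than in this generated wrapper:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.BinaryOperator;

    final class MvAppendSketch {
        // field1/field2: one entry per position; a null entry models a null block position.
        static List<List<Object>> eval(List<List<Object>> field1, List<List<Object>> field2,
                                       BinaryOperator<List<Object>> process) {
            int positionCount = field1.size();
            List<List<Object>> result = new ArrayList<>(positionCount);
            for (int p = 0; p < positionCount; p++) {
                boolean allBlocksAreNulls = field1.get(p) == null && field2.get(p) == null;
                if (allBlocksAreNulls) {
                    result.add(null); // mirrors result.appendNull(); continue position;
                    continue;
                }
                result.add(process.apply(field1.get(p), field2.get(p))); // MvAppend.process(...)
            }
            return result;
        }
    }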
+package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}. + * This class is generated. Do not edit it. + */ +public final class MvAppendBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field1; + + private final EvalOperator.ExpressionEvaluator field2; + + private final DriverContext driverContext; + + public MvAppendBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator field1, + EvalOperator.ExpressionEvaluator field2, DriverContext driverContext) { + this.field1 = field1; + this.field2 = field2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock field1Block = (BytesRefBlock) field1.eval(page)) { + try (BytesRefBlock field2Block = (BytesRefBlock) field2.eval(page)) { + return eval(page.getPositionCount(), field1Block, field2Block); + } + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock field1Block, + BytesRefBlock field2Block) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + boolean allBlocksAreNulls = true; + if (!field1Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (!field2Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (allBlocksAreNulls) { + result.appendNull(); + continue position; + } + MvAppend.process(result, p, field1Block, field2Block); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MvAppendBytesRefEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field1, field2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field1; + + private final EvalOperator.ExpressionEvaluator.Factory field2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field1, + EvalOperator.ExpressionEvaluator.Factory field2) { + this.source = source; + this.field1 = field1; + this.field2 = field2; + } + + @Override + public MvAppendBytesRefEvaluator get(DriverContext context) { + return new MvAppendBytesRefEvaluator(source, field1.get(context), field2.get(context), context); + } + + @Override + public String toString() { + return "MvAppendBytesRefEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java new file mode 100644 index 0000000000000..f714fcefac8c8 
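The one-line constructor change repeated through every evaluator in this diff swaps eager construction (new Warnings(source)) for Warnings.createWarnings(driverContext.warningsMode(), source), so the driver context, not the evaluator, decides how warnings are handled; that is also why the assignment now sits after driverContext is available. A hypothetical sketch of the shape of such a mode-aware factory (the real modes and sinks live in the esql compute engine and are not shown in this diff):

    // Stand-ins only: not the real org.elasticsearch.xpack.esql Warnings API.
    enum WarningsMode { COLLECT, IGNORE }

    @FunctionalInterface
    interface Warnings {
        void registerException(Exception e);

        static Warnings createWarnings(WarningsMode mode, String source) {
            if (mode == WarningsMode.IGNORE) {
                return e -> {}; // no-op sink: evaluation warnings are dropped entirely
            }
            // COLLECT: record the failing source location alongside each warning.
            return e -> System.err.println("warning at " + source + ": " + e.getMessage());
        }
    }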
--- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendDoubleEvaluator.java @@ -0,0 +1,102 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}. + * This class is generated. Do not edit it. + */ +public final class MvAppendDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field1; + + private final EvalOperator.ExpressionEvaluator field2; + + private final DriverContext driverContext; + + public MvAppendDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator field1, + EvalOperator.ExpressionEvaluator field2, DriverContext driverContext) { + this.field1 = field1; + this.field2 = field2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (DoubleBlock field1Block = (DoubleBlock) field1.eval(page)) { + try (DoubleBlock field2Block = (DoubleBlock) field2.eval(page)) { + return eval(page.getPositionCount(), field1Block, field2Block); + } + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock field1Block, DoubleBlock field2Block) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + boolean allBlocksAreNulls = true; + if (!field1Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (!field2Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (allBlocksAreNulls) { + result.appendNull(); + continue position; + } + MvAppend.process(result, p, field1Block, field2Block); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MvAppendDoubleEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field1, field2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field1; + + private final EvalOperator.ExpressionEvaluator.Factory field2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field1, + EvalOperator.ExpressionEvaluator.Factory field2) { + this.source = source; + this.field1 = field1; + this.field2 = field2; + } + + @Override + public MvAppendDoubleEvaluator get(DriverContext context) { + return new MvAppendDoubleEvaluator(source, field1.get(context), field2.get(context), context); + } + + @Override + public String toString() { + return "MvAppendDoubleEvaluator[" + "field1=" + 
field1 + ", field2=" + field2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java new file mode 100644 index 0000000000000..1fbbdf81323bb --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendIntEvaluator.java @@ -0,0 +1,102 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}. + * This class is generated. Do not edit it. + */ +public final class MvAppendIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field1; + + private final EvalOperator.ExpressionEvaluator field2; + + private final DriverContext driverContext; + + public MvAppendIntEvaluator(Source source, EvalOperator.ExpressionEvaluator field1, + EvalOperator.ExpressionEvaluator field2, DriverContext driverContext) { + this.field1 = field1; + this.field2 = field2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (IntBlock field1Block = (IntBlock) field1.eval(page)) { + try (IntBlock field2Block = (IntBlock) field2.eval(page)) { + return eval(page.getPositionCount(), field1Block, field2Block); + } + } + } + + public IntBlock eval(int positionCount, IntBlock field1Block, IntBlock field2Block) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + boolean allBlocksAreNulls = true; + if (!field1Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (!field2Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (allBlocksAreNulls) { + result.appendNull(); + continue position; + } + MvAppend.process(result, p, field1Block, field2Block); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MvAppendIntEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field1, field2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field1; + + private final EvalOperator.ExpressionEvaluator.Factory field2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field1, + EvalOperator.ExpressionEvaluator.Factory field2) { + 
this.source = source; + this.field1 = field1; + this.field2 = field2; + } + + @Override + public MvAppendIntEvaluator get(DriverContext context) { + return new MvAppendIntEvaluator(source, field1.get(context), field2.get(context), context); + } + + @Override + public String toString() { + return "MvAppendIntEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java new file mode 100644 index 0000000000000..14f27ae88964b --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendLongEvaluator.java @@ -0,0 +1,102 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvAppend}. + * This class is generated. Do not edit it. 
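Each generated evaluator also carries the nested Factory shown above: the factory holds only plan-time state (the Source and the child factories), while get(DriverContext) builds a fresh evaluator per driver, binding the children to the same context so per-driver state such as the block factory and warnings mode attaches at execution time. A minimal hypothetical rendering of that two-phase pattern, with none of these types being the real compute-engine API:

    interface Evaluator extends AutoCloseable {
        long eval(int position);
        @Override default void close() {}
    }

    record DriverContext(boolean collectWarnings) {} // stand-in carrying per-driver state

    interface EvaluatorFactory {
        Evaluator get(DriverContext context); // invoked once per driver at execution time
    }

    final class AddOneEvaluatorFactory implements EvaluatorFactory {
        private final EvaluatorFactory child; // plan-time state only
        AddOneEvaluatorFactory(EvaluatorFactory child) { this.child = child; }
        @Override
        public Evaluator get(DriverContext context) {
            Evaluator bound = child.get(context); // children bind to the same driver
            return position -> bound.eval(position) + 1;
        }
    }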
+ */ +public final class MvAppendLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field1; + + private final EvalOperator.ExpressionEvaluator field2; + + private final DriverContext driverContext; + + public MvAppendLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field1, + EvalOperator.ExpressionEvaluator field2, DriverContext driverContext) { + this.field1 = field1; + this.field2 = field2; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (LongBlock field1Block = (LongBlock) field1.eval(page)) { + try (LongBlock field2Block = (LongBlock) field2.eval(page)) { + return eval(page.getPositionCount(), field1Block, field2Block); + } + } + } + + public LongBlock eval(int positionCount, LongBlock field1Block, LongBlock field2Block) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + boolean allBlocksAreNulls = true; + if (!field1Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (!field2Block.isNull(p)) { + allBlocksAreNulls = false; + } + if (allBlocksAreNulls) { + result.appendNull(); + continue position; + } + MvAppend.process(result, p, field1Block, field2Block); + } + return result.build(); + } + } + + @Override + public String toString() { + return "MvAppendLongEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field1, field2); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field1; + + private final EvalOperator.ExpressionEvaluator.Factory field2; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field1, + EvalOperator.ExpressionEvaluator.Factory field2) { + this.source = source; + this.field1 = field1; + this.field2 = field2; + } + + @Override + public MvAppendLongEvaluator get(DriverContext context) { + return new MvAppendLongEvaluator(source, field1.get(context), field2.get(context), context); + } + + @Override + public String toString() { + return "MvAppendLongEvaluator[" + "field1=" + field1 + ", field2=" + field2 + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java index d63da310b7acd..294cdb4373c86 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBooleanEvaluator.java @@ -36,11 +36,11 @@ public final class MvSliceBooleanEvaluator implements EvalOperator.ExpressionEva public MvSliceBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator end, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.start = start; this.end = end; this.driverContext = driverContext; + this.warnings = 
Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java index f9ad5518e5792..ce17962a59391 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceBytesRefEvaluator.java @@ -36,11 +36,11 @@ public final class MvSliceBytesRefEvaluator implements EvalOperator.ExpressionEv public MvSliceBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator end, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.start = start; this.end = end; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java index 13eed4711c8fc..30cd51f2b23c0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceDoubleEvaluator.java @@ -36,11 +36,11 @@ public final class MvSliceDoubleEvaluator implements EvalOperator.ExpressionEval public MvSliceDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator end, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.start = start; this.end = end; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java index 114677f070261..cf8e9babc88bd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceIntEvaluator.java @@ -35,11 +35,11 @@ public final class MvSliceIntEvaluator implements EvalOperator.ExpressionEvaluat public MvSliceIntEvaluator(Source source, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator end, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.start = start; this.end = end; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java index 207d342a1b720..e7e2b7f643db3 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceLongEvaluator.java @@ -36,11 +36,11 @@ public final class MvSliceLongEvaluator implements EvalOperator.ExpressionEvalua public MvSliceLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator end, DriverContext driverContext) { - this.warnings = new Warnings(source); this.field = field; this.start = start; this.end = end; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index dea92249e7e0e..a61b8f71a04a0 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -24,7 +24,7 @@ public final class MvSumIntEvaluator extends AbstractMultivalueFunction.Abstract public MvSumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); - this.warnings = new Warnings(source); + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index dda5638fbeef5..6d37a1e7780ef 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -24,7 +24,7 @@ public final class MvSumLongEvaluator extends AbstractMultivalueFunction.Abstrac public MvSumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); - this.warnings = new Warnings(source); + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index 83ca390fdd90b..9db3b2a2afde9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -24,7 +24,7 @@ public final class MvSumUnsignedLongEvaluator extends AbstractMultivalueFunction public MvSumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { super(driverContext, field); - this.warnings = new Warnings(source); + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java index 37f8804082937..e307400f98696 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipEvaluator.java @@ -35,11 +35,11 @@ public final class MvZipEvaluator implements EvalOperator.ExpressionEvaluator { public MvZipEvaluator(Source source, EvalOperator.ExpressionEvaluator leftField, EvalOperator.ExpressionEvaluator rightField, EvalOperator.ExpressionEvaluator delim, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftField = leftField; this.rightField = rightField; this.delim = delim; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java index 32bc1acf0dcd9..b5a5634bc0fd1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator im public SpatialContainsCartesianPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java index 832e8f990720c..1d9da890a1b48 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -38,10 +38,10 @@ public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator impl public SpatialContainsCartesianPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -96,10 +96,10 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java index a1c8d482ddca4..3e2de0ebd397a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialContainsCartesianSourceAndConstantEvaluator implements public SpatialContainsCartesianSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java index 38c1087cc7606..9d19429519a6e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialContainsCartesianSourceAndSourceEvaluator 
implements E public SpatialContainsCartesianSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index 49c7d1433dae0..f345c135747e7 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implemen public SpatialContainsGeoPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java index b1fee22fcb204..bd083e382927c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements public SpatialContainsGeoPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index 377ead4fb9387..f2316c17db1ec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -37,10 
+37,10 @@ public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalO public SpatialContainsGeoSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java index f906969a03571..7bce1a585f490 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOpe public SpatialContainsGeoSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java index 1265e3eb7f496..3c46f859c80f8 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator im public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java index 6a6f1a292a598..4630ef9b01b47 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -38,10 +38,10 @@ public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator impl public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -96,10 +96,10 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java index 46dded83d3af2..6e5becc402135 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements public SpatialDisjointCartesianSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java index a7cea9330aa00..5c888eddcac68 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialDisjointCartesianSourceAndSourceEvaluator 
implements E public SpatialDisjointCartesianSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java index e52c04c4d72b4..6fa2d7a6dd639 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implemen public SpatialDisjointGeoPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java index 3e8df2f0b7852..96a2a06dbe241 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements public SpatialDisjointGeoPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java index c061ecf09ced8..c3930168ae594 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -37,10 
+37,10 @@ public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalO public SpatialDisjointGeoSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java index 9d617a39973f2..467d2518d9a9c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOpe public SpatialDisjointGeoSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java index 775680d2cde42..56912e3233a4c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator public SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java index 4b6d8168000ae..a38dacc1e04b6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -38,10 +38,10 @@ public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator im public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -96,10 +96,10 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java index d7e6507bec3e0..26c4abdc51ecf 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implemen public SpatialIntersectsCartesianSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java index f1581fefbf0f7..8ad15d9f5a881 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class 
SpatialIntersectsCartesianSourceAndSourceEvaluator implements public SpatialIntersectsCartesianSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java index c1618b407b0e5..405d013a77f5a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implem public SpatialIntersectsGeoPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java index 89ba6c79c06d9..f385ee992096c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implemen public SpatialIntersectsGeoPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java index 8fa0f02b783bd..dea6989a830ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements Eva public SpatialIntersectsGeoSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java index a6dc48b761989..2b5dd689ca3a9 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalO public SpatialIntersectsGeoSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java index f17a5183e9a33..5c31df936236b 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator impl public SpatialWithinCartesianPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java index 702f599700691..dae80c04b43d3 100644 --- 
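/*
 * Review note, illustration only (not part of the generated sources): the one change repeated in
 * every constructor above is that `warnings` is now assigned last, via
 * Warnings.createWarnings(driverContext.warningsMode(), source), because the warnings mode comes
 * from the freshly assigned driverContext. A minimal sketch of that factory shape follows; the
 * Mode names and sink bodies are assumptions, only the
 * createWarnings(driverContext.warningsMode(), source) call shape is taken from the diff itself.
 */
final class WarningsFactorySketch {
    enum Mode { COLLECTING, IGNORING } // hypothetical mode names

    interface Sink {
        void registerException(Exception e);
    }

    /** Shared no-op sink handed out when warnings are disabled for the driver. */
    private static final Sink IGNORE = e -> {};

    static Sink createWarnings(Mode mode, String source) {
        if (mode == Mode.IGNORING) {
            return IGNORE;
        }
        return e -> {
            // a real sink would rate-limit and attach `source` (the failing
            // expression's position in the query) to the response headers
            System.err.println(source + ": " + e.getMessage());
        };
    }
}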
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -38,10 +38,10 @@ public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implem public SpatialWithinCartesianPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -96,10 +96,10 @@ public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, public BooleanVector eval(int positionCount, LongVector leftValueVector, BytesRefVector rightValueVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef rightValueScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + result.appendBoolean(p, SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java index 2e16e18b5677e..1a1bd4cd64535 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialWithinCartesianSourceAndConstantEvaluator implements E public SpatialWithinCartesianSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java index 1714ff54543ec..43440a165de74 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialWithinCartesianSourceAndSourceEvaluator implements Eva public SpatialWithinCartesianSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java index 2df0772da62b3..d19182ffb2341 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -35,10 +35,10 @@ public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements public SpatialWithinGeoPointDocValuesAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java index 458532018ffdf..761e0f2019cec 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements E public SpatialWithinGeoPointDocValuesAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java index f284cc124260b..cca5ef92918d8 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -37,10 +37,10 @@ public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOpe public SpatialWithinGeoSourceAndConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java index 2f18cb11d06ed..bbeb07f6eefc5 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java @@ -36,10 +36,10 @@ public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOpera public SpatialWithinGeoSourceAndSourceEvaluator(Source source, EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, DriverContext driverContext) { - this.warnings = new Warnings(source); this.leftValue = leftValue; this.rightValue = rightValue; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index 223f607a8b23c..21491b4272ea1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -38,11 +38,11 @@ public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEval public AutomataMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, ByteRunAutomaton automaton, String pattern, DriverContext driverContext) { - this.warnings = new Warnings(source); this.input = input; this.automaton = automaton; this.pattern = pattern; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -78,10 +78,10 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { } public BooleanVector eval(int positionCount, BytesRefVector inputVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - 
result.appendBoolean(AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); + result.appendBoolean(p, AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 0b539a5fdaf7e..e73cc58590fc1 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -37,10 +37,10 @@ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { public ConcatEvaluator(Source source, BreakingBytesRefBuilder scratch, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { - this.warnings = new Warnings(source); this.scratch = scratch; this.values = values; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index 7c00bd2e27cce..e075cdcff8827 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -35,10 +35,10 @@ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator public EndsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator suffix, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.suffix = suffix; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -93,11 +93,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc public BooleanVector eval(int positionCount, BytesRefVector strVector, BytesRefVector suffixVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef suffixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(EndsWith.process(strVector.getBytesRef(p, strScratch), suffixVector.getBytesRef(p, suffixScratch))); + result.appendBoolean(p, EndsWith.process(strVector.getBytesRef(p, strScratch), suffixVector.getBytesRef(p, suffixScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index f04e56d0b87f3..05c7c425ed9ab 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -31,9 +31,9 @@ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { public LTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 6c0e392cf6120..8effe5e6d72cd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -42,12 +42,12 @@ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { public LeftEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { - this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; this.length = length; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 25877cfc3d5be..5d71fa5a4d70e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -33,9 +33,9 @@ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { public LengthEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -71,10 +71,10 @@ public IntBlock eval(int positionCount, BytesRefBlock valBlock) { } public IntVector eval(int positionCount, BytesRefVector valVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Length.process(valVector.getBytesRef(p, valScratch))); + result.appendInt(p, Length.process(valVector.getBytesRef(p, valScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java index e62d32dca1ef5..17430f8fc572f 100644 --- 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -38,11 +38,11 @@ public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { public LocateEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator substr, EvalOperator.ExpressionEvaluator start, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.substr = substr; this.start = start; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -115,11 +115,11 @@ public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock su public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector, IntVector startVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef substrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); + result.appendInt(p, Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java index 4fcffcb88412e..9f206426a348e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -35,10 +35,10 @@ public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEval public LocateNoStartEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator substr, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.substr = substr; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -92,11 +92,11 @@ public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock su } public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { - try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef substrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + result.appendInt(p, Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); } return result.build(); } diff --git 
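/*
 * Review note, illustration only: the vector fast paths above (AutomataMatch, EndsWith, Length,
 * Locate, ...) switch from newXxxVectorBuilder to newXxxVectorFixedBuilder and pass the position
 * `p` into each append. A vector path can never emit nulls and always produces exactly
 * positionCount values, so the builder can allocate once up front, and the positional write
 * removes the shared append cursor between loop iterations. The array analogy below sketches
 * that difference; it is not the real builder internals.
 */
final class FixedBuilderSketch {
    /** Growable shape: every append funnels through one cursor and may resize. */
    static int[] growing(int[] input) {
        int[] out = new int[0];
        for (int v : input) {
            out = java.util.Arrays.copyOf(out, out.length + 1); // amortized growth in real builders
            out[out.length - 1] = v;
        }
        return out;
    }

    /** Fixed shape: size known up front; each write lands at its own index. */
    static int[] fixed(int[] input) {
        int[] out = new int[input.length]; // single allocation
        for (int p = 0; p < input.length; p++) {
            out[p] = input[p]; // independent per-position writes
        }
        return out;
    }
}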
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index 8cbd454895a67..a6dcdb25f2dfc 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -31,9 +31,9 @@ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { public RTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java new file mode 100644 index 0000000000000..e83c7c7720828 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatConstantEvaluator.java @@ -0,0 +1,136 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.function.Function; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. + * This class is generated. Do not edit it. 
+ */ +public final class RepeatConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final BreakingBytesRefBuilder scratch; + + private final EvalOperator.ExpressionEvaluator str; + + private final int number; + + private final DriverContext driverContext; + + public RepeatConstantEvaluator(Source source, BreakingBytesRefBuilder scratch, + EvalOperator.ExpressionEvaluator str, int number, DriverContext driverContext) { + this.scratch = scratch; + this.str = str; + this.number = number; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock); + } + return eval(page.getPositionCount(), strVector); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(Repeat.processConstantNumber(scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), number)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefVector strVector) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(Repeat.processConstantNumber(scratch, strVector.getBytesRef(p, strScratch), number)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "RepeatConstantEvaluator[" + "str=" + str + ", number=" + number + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(scratch, str); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final Function<DriverContext, BreakingBytesRefBuilder> scratch; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final int number; + + public Factory(Source source, Function<DriverContext, BreakingBytesRefBuilder> scratch, + EvalOperator.ExpressionEvaluator.Factory str, int number) { + this.source = source; + this.scratch = scratch; + this.str = str; + this.number = number; + } + + @Override + public RepeatConstantEvaluator get(DriverContext context) { + return new RepeatConstantEvaluator(source, scratch.apply(context), str.get(context), number, context); + } + + @Override + public String toString() { + return "RepeatConstantEvaluator[" + "str=" + str + ", number=" + number + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java
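/*
 * Review note, illustration only: RepeatConstantEvaluator above is the specialization
 * REPEAT(str, n) gets when n folds to a literal int, so the count is captured once in the
 * constructor instead of being read (and null/multi-value checked) per row. Below is a
 * simplified stand-in for what Repeat.processConstantNumber(scratch, str, number) must compute;
 * the real method writes into a circuit-breaker-tracked BreakingBytesRefBuilder, and this
 * plain-array version and its exception message are assumptions.
 */
final class RepeatSketch {
    static byte[] repeat(byte[] str, int number) {
        if (number < 0) {
            // the generated evaluator catches this, registers it as a warning, and emits null
            throw new IllegalArgumentException("negative repeat count: " + number);
        }
        byte[] out = new byte[str.length * number];
        for (int i = 0; i < number; i++) {
            System.arraycopy(str, 0, out, i * str.length, str.length);
        }
        return out;
    }
}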
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java new file mode 100644 index 0000000000000..3723a35283c4b --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatEvaluator.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.function.Function; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Warnings; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Repeat}. + * This class is generated. Do not edit it. + */ +public final class RepeatEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final BreakingBytesRefBuilder scratch; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator number; + + private final DriverContext driverContext; + + public RepeatEvaluator(Source source, BreakingBytesRefBuilder scratch, + EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator number, + DriverContext driverContext) { + this.scratch = scratch; + this.str = str; + this.number = number; + this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (IntBlock numberBlock = (IntBlock) number.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, numberBlock); + } + IntVector numberVector = numberBlock.asVector(); + if (numberVector == null) { + return eval(page.getPositionCount(), strBlock, numberBlock); + } + return eval(page.getPositionCount(), strVector, numberVector); + } + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock numberBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (numberBlock.isNull(p)) { + result.appendNull(); + 
continue position; + } + if (numberBlock.getValueCount(p) != 1) { + if (numberBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(Repeat.process(scratch, strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), numberBlock.getInt(numberBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefVector strVector, IntVector numberVector) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(Repeat.process(scratch, strVector.getBytesRef(p, strScratch), numberVector.getInt(p))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "RepeatEvaluator[" + "str=" + str + ", number=" + number + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(scratch, str, number); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final Function<DriverContext, BreakingBytesRefBuilder> scratch; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory number; + + public Factory(Source source, Function<DriverContext, BreakingBytesRefBuilder> scratch, + EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory number) { + this.source = source; + this.scratch = scratch; + this.str = str; + this.number = number; + } + + @Override + public RepeatEvaluator get(DriverContext context) { + return new RepeatEvaluator(source, scratch.apply(context), str.get(context), number.get(context), context); + } + + @Override + public String toString() { + return "RepeatEvaluator[" + "str=" + str + ", number=" + number + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index 09a5429f351bc..2b898377f59f6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -37,11 +37,11 @@ public final class ReplaceConstantEvaluator implements EvalOperator.ExpressionEv public ReplaceConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator str, Pattern regex, EvalOperator.ExpressionEvaluator newStr, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.regex = regex; this.newStr = newStr; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java
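/*
 * Review note, illustration only: the block path of RepeatEvaluator above shows the guard the
 * code generator emits once per argument. Null positions pass through as null silently;
 * positions with more than one value register a single "single-value function encountered
 * multi-value" warning and also become null; only exact single values reach the process method.
 * The same decision condensed into one helper (the Block interface here is a hypothetical
 * minimal slice of the real compute API):
 */
final class SingleValueGuardSketch {
    interface Block {
        boolean isNull(int p);
        int getValueCount(int p);
    }

    /** True when position p holds exactly one value and may be processed. */
    static boolean singleValued(Block block, int p, java.util.function.Consumer<Exception> warnings) {
        if (block.isNull(p)) {
            return false; // null in -> null out, no warning
        }
        if (block.getValueCount(p) != 1) {
            if (block.getValueCount(p) > 1) {
                warnings.accept(new IllegalArgumentException("single-value function encountered multi-value"));
            }
            return false; // multi-value in -> warning + null out
        }
        return true;
    }
}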
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index 782a0323bc82a..a6544f0b16817 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -37,11 +37,11 @@ public final class ReplaceEvaluator implements EvalOperator.ExpressionEvaluator public ReplaceEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator regex, EvalOperator.ExpressionEvaluator newStr, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.regex = regex; this.newStr = newStr; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index f8d3a8dae348f..57cad6c63242d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -42,12 +42,12 @@ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { public RightEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { - this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; this.length = length; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 89138d2f54f64..d58b1aee0ee9d 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -36,11 +36,11 @@ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEv public SplitSingleByteEvaluator(Source source, EvalOperator.ExpressionEvaluator str, byte delim, BytesRef scratch, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index be072452f9e88..753febd88ca58 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -36,11 +36,11 @@ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEval public SplitVariableEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator delim, BytesRef scratch, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 83527c4b1aa24..a932e449f650a 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -35,10 +35,10 @@ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluat public StartsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator prefix, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.prefix = prefix; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -93,11 +93,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc public BooleanVector eval(int positionCount, BytesRefVector strVector, BytesRefVector prefixVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); BytesRef prefixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(StartsWith.process(strVector.getBytesRef(p, strScratch), prefixVector.getBytesRef(p, prefixScratch))); + result.appendBoolean(p, StartsWith.process(strVector.getBytesRef(p, strScratch), prefixVector.getBytesRef(p, prefixScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index 98fc2510a1f89..92a2d622eaf56 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -38,11 +38,11 @@ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluato public SubstringEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.start = start; this.length = length; this.driverContext = driverContext; + this.warnings = 
Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index 442ffd517d43c..9177b31ab64fd 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -35,10 +35,10 @@ public final class SubstringNoLengthEvaluator implements EvalOperator.Expression public SubstringNoLengthEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, DriverContext driverContext) { - this.warnings = new Warnings(source); this.str = str; this.start = start; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java index ac032e60a6b4b..ee30b2b282162 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerEvaluator.java @@ -34,10 +34,10 @@ public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator public ToLowerEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.locale = locale; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java index e79aa71a0e9d2..cf72804b7e354 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperEvaluator.java @@ -34,10 +34,10 @@ public final class ToUpperEvaluator implements EvalOperator.ExpressionEvaluator public ToUpperEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.locale = locale; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index fd357506c91a0..d4a884fc97b9c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -31,9 +31,9 @@ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { public TrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { - this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index a48c658ee825b..44ed1ebebd817 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -35,10 +35,10 @@ public final class AddDatetimesEvaluator implements EvalOperator.ExpressionEvalu public AddDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime, TemporalAmount temporalAmount, DriverContext driverContext) { - this.warnings = new Warnings(source); this.datetime = datetime; this.temporalAmount = temporalAmount; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index a9b2ebef30125..fbf25c5fec393 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -32,10 +32,10 @@ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluat public AddDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -87,9 +87,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs } public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { - try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendDouble(Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); + result.appendDouble(p, Add.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java 
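/*
 * Review note, illustration only: AddDoublesEvaluator's vector path above can move to a
 * DoubleVector.FixedBuilder because double addition is total, so every position is guaranteed
 * to yield a value. The int/long adders receive only the constructor change here: their vector
 * paths stay on nullable block builders, since overflow-checked addition can fail per position
 * and a failure must surface as a registered warning plus a null. The contrast in plain Java:
 */
final class AddOverflowSketch {
    static double addDoubles(double lhs, double rhs) {
        return lhs + rhs; // never throws; worst case is Infinity or NaN
    }

    static long addLongs(long lhs, long rhs) {
        // throws ArithmeticException on overflow, which the generated
        // evaluator catches, registers as a warning, and maps to null
        return Math.addExact(lhs, rhs);
    }
}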
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
index aeba6794fe9dd..570b666676330 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java
@@ -33,10 +33,10 @@ public final class AddIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public AddIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
index 8f5b399e39b77..71dda4fd9752e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class AddLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public AddLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
index 36096e878ea82..16db58fe5bd6a 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class AddUnsignedLongsEvaluator implements EvalOperator.ExpressionE
 
   public AddUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
index f158cd20fc87c..20d1647d6bc99 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java
@@ -33,10 +33,10 @@ public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluat
 
   public DivDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
index db4c22491be31..a1b4f3857ad0d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java
@@ -33,10 +33,10 @@ public final class DivIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public DivIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
index 359f549d137ee..902d7d2f0b98c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class DivLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public DivLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
index bf3d579788dc4..65ba269840121 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class DivUnsignedLongsEvaluator implements EvalOperator.ExpressionE
 
   public DivUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
index 659e3b2c3c89f..0f1b344e6add7 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java
@@ -33,10 +33,10 @@ public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluat
 
   public ModDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
index 97916c7c0b1ff..014bc32e0e250 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java
@@ -33,10 +33,10 @@ public final class ModIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public ModIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
index 89d598b5b74c4..3436c10521b64 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class ModLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public ModLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
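Every constructor hunk in this stretch of generated evaluators makes the same two changes: `warnings` is no longer built eagerly with `new Warnings(source)`, and its assignment moves below `this.driverContext = driverContext` because the replacement factory reads the warnings mode off the driver context. A minimal sketch of the factory shape this implies follows; only the `Warnings.createWarnings(driverContext.warningsMode(), source)` call is taken from the diff itself, while the `WarningsMode` constants, the no-op instance, and the stub `Source` record are assumptions for illustration.

```java
// Hypothetical, simplified shapes -- not the real Elasticsearch classes.
enum WarningsMode { COLLECT, IGNORE } // assumed constant names

record Source(int line, int column, String text) {} // stub for illustration

class Warnings {
    // Shared instance whose register method does nothing; lets evaluators
    // skip per-row warning bookkeeping entirely when warnings are ignored.
    private static final Warnings NOOP = new Warnings(null) {
        @Override
        void registerException(Exception e) {
            // warnings disabled: drop the exception
        }
    };

    private final Source source;

    Warnings(Source source) {
        this.source = source;
    }

    // The mode comes from the DriverContext, which is why the generated
    // constructors above now assign driverContext before building warnings.
    static Warnings createWarnings(WarningsMode mode, Source source) {
        return switch (mode) {
            case COLLECT -> new Warnings(source); // the old unconditional path
            case IGNORE -> NOOP;                  // no allocation, no headers
        };
    }

    void registerException(Exception e) {
        // real implementation would record a response warning header
        System.err.println("warning at " + source.text() + ": " + e);
    }
}
```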
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
index 11764071bb034..b031a21e79f73 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class ModUnsignedLongsEvaluator implements EvalOperator.ExpressionE
 
   public ModUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
index 6a035b343d12f..9f3d5aa6d8b10 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java
@@ -32,10 +32,10 @@ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluat
 
   public MulDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -87,9 +87,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs
   }
 
   public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) {
+    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendDouble(p, Mul.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
index 5c8b11fdd24b6..089765b1662c4 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java
@@ -33,10 +33,10 @@ public final class MulIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public MulIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
index 87d547fc0e593..2cbc69a7face1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class MulLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public MulLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
index 8b85459fe49c8..3a74466a9bc45 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class MulUnsignedLongsEvaluator implements EvalOperator.ExpressionE
 
   public MulUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
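The second recurring change, first visible in MulDoublesEvaluator above and repeated in the vector `eval` hunks below, swaps the growable `DoubleVector.Builder` for a `DoubleVector.FixedBuilder` sized to `positionCount` up front, with the append calls becoming positional (`appendDouble(p, ...)`). A rough sketch of the trade-off follows; these are hypothetical, simplified builders, not the real `org.elasticsearch.compute.data` API beyond the method names the diff shows.

```java
import java.util.Arrays;

// Growable builder: keeps a cursor and may reallocate as it fills.
final class GrowableDoubleBuilder {
    private double[] values = new double[8];
    private int next;

    GrowableDoubleBuilder appendDouble(double v) {
        if (next == values.length) {                  // growth check on every append
            values = Arrays.copyOf(values, values.length * 2);
        }
        values[next++] = v;
        return this;
    }

    double[] build() {
        return Arrays.copyOf(values, next);           // trailing trim copy
    }
}

// Fixed builder: the position count is known before the loop starts, so
// capacity is exact and each append is a plain indexed array store.
final class FixedDoubleBuilder {
    private final double[] values;

    FixedDoubleBuilder(int positionCount) {
        this.values = new double[positionCount];
    }

    FixedDoubleBuilder appendDouble(int position, double v) {
        values[position] = v;                          // no cursor, no growth
        return this;
    }

    double[] build() {
        return values;                                 // no trailing copy needed
    }
}
```

This only pays off in the vector (no-nulls) path, where exactly one value is written per position; the block paths in these files keep the growable builders.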
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
index 2b996c7174ceb..24f04a23ebb4d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java
@@ -30,9 +30,9 @@ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluat
 
   public NegDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator v,
       DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.v = v;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -67,9 +67,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock vBlock) {
   }
 
   public DoubleVector eval(int positionCount, DoubleVector vVector) {
-    try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) {
+    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(Neg.processDoubles(vVector.getDouble(p)));
+        result.appendDouble(p, Neg.processDoubles(vVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
index e3b8552b81ee6..0b27ba7f46153 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java
@@ -31,9 +31,9 @@ public final class NegIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public NegIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator v,
       DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.v = v;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
index b269d4c8c3fd9..e6c2ccb469bb8 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java
@@ -31,9 +31,9 @@ public final class NegLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public NegLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator v,
       DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.v = v;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
index ab162ea120004..f87f3c217e16e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java
@@ -35,10 +35,10 @@ public final class SubDatetimesEvaluator implements EvalOperator.ExpressionEvalu
 
   public SubDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator datetime,
       TemporalAmount temporalAmount, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.datetime = datetime;
     this.temporalAmount = temporalAmount;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
index ff1e3d584772b..291cb5648e213 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
@@ -32,10 +32,10 @@ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluat
 
   public SubDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -87,9 +87,9 @@ public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhs
   }
 
   public DoubleVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) {
+    try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendDouble(Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendDouble(p, Sub.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java
index cc47c615e1862..ec572bd491ec9 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java
@@ -33,10 +33,10 @@ public final class SubIntsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public SubIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java
index de3e0dd0dcbad..eaa1efeb3922d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class SubLongsEvaluator implements EvalOperator.ExpressionEvaluator
 
   public SubLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java
index 7ab72956562de..f221e9b072351 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java
@@ -33,10 +33,10 @@ public final class SubUnsignedLongsEvaluator implements EvalOperator.ExpressionE
 
   public SubUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java
index 185e225fadb91..e39a9482215fa 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsBoolsEvaluator.java
@@ -32,10 +32,10 @@ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvalua
 
   public EqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -87,9 +87,9 @@ public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock
   }
 
   public BooleanVector eval(int positionCount, BooleanVector lhsVector, BooleanVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p)));
+        result.appendBoolean(p, Equals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java
index 9e0c62d118eca..cdf84e0506216 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEval
 
   public EqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, Equals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java
index 192a5272789b3..ad942e63c6f44 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsGeometriesEvaluator.java
@@ -35,10 +35,10 @@ public final class EqualsGeometriesEvaluator implements EvalOperator.ExpressionE
 
   public EqualsGeometriesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, Equals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java
index 5728b9be40b4a..d60efd0eddedc 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java
@@ -34,10 +34,10 @@ public final class EqualsIntsEvaluat
 
   public EqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock
   }
 
   public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
+        result.appendBoolean(p, Equals.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java
index 2d3b679960d15..e28dcaeba31dd 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java
@@ -35,10 +35,10 @@ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEva
 
   public EqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, Equals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java
index fa4da286cde21..504422e59071d 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsLongsEvaluator.java
@@ -34,10 +34,10 @@ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvalua
 
   public EqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo
   }
 
   public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(Equals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
+        result.appendBoolean(p, Equals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java
index e40ce0df22a14..c1e0fcd09f175 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanDoublesEvaluator implements EvalOperator.Expressio
 
   public GreaterThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, GreaterThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java
index e892430791344..721310c8a7518 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEv
 
   public GreaterThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock
   }
 
   public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
+        result.appendBoolean(p, GreaterThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java
index 82f8f62097545..1edb13c789a95 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java
@@ -35,10 +35,10 @@ public final class GreaterThanKeywordsEvaluator implements EvalOperator.Expressi
 
   public GreaterThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, GreaterThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
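A pattern worth noting in the keyword and geometry hunks above: each vector `eval` allocates its `BytesRef` scratch objects once per call and passes them into `getBytesRef(p, scratch)` for every row, so reading a value never allocates inside the loop. A small sketch of that pattern follows; `SimpleBytesVector` is an invented interface for illustration, with only the `getBytesRef(position, scratch)` shape taken from the diff (and `org.apache.lucene.util.BytesRef` being the real Lucene class).

```java
import org.apache.lucene.util.BytesRef;

// Invented for illustration: a vector that fills and returns a caller-supplied
// scratch BytesRef instead of allocating a new one per position.
interface SimpleBytesVector {
    BytesRef getBytesRef(int position, BytesRef scratch);

    int positionCount();
}

final class KeywordEquality {
    static boolean[] equalRows(SimpleBytesVector lhs, SimpleBytesVector rhs) {
        boolean[] result = new boolean[lhs.positionCount()];
        BytesRef lhsScratch = new BytesRef(); // allocated once per call...
        BytesRef rhsScratch = new BytesRef(); // ...and reused for every row
        for (int p = 0; p < result.length; p++) {
            // BytesRef.equals compares byte content, so no copies are made
            result[p] = lhs.getBytesRef(p, lhsScratch).equals(rhs.getBytesRef(p, rhsScratch));
        }
        return result;
    }
}
```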
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java
index 0db53292de9bb..79bc2b646b2f1 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanLongsEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionE
 
   public GreaterThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo
   }
 
   public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
      position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
+        result.appendBoolean(p, GreaterThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java
index 2dabfdb8c0e46..9b39defdf7442 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.Ex
 
   public GreaterThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, GreaterThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java
index 12674aee9d469..c6aa1e89c1998 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.Expre
 
   public GreaterThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock
   }
 
   public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
+        result.appendBoolean(p, GreaterThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java
index 433a88cd5f5b8..baddf53e4d74b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java
@@ -35,10 +35,10 @@ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.E
 
   public GreaterThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, GreaterThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java
index f7b040cbcde58..d2291d1752637 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java
@@ -34,10 +34,10 @@ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.Expr
 
   public GreaterThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo
   }
 
   public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(GreaterThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
+        result.appendBoolean(p, GreaterThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
index e8f02f232bbf7..4a1737f01a245 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsConstantEvaluator.java
@@ -36,10 +36,10 @@ public final class InsensitiveEqualsConstantEvaluator implements EvalOperator.Ex
 
   public InsensitiveEqualsConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       ByteRunAutomaton rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -75,10 +75,10 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock) {
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs));
+        result.appendBoolean(p, InsensitiveEquals.processConstant(lhsVector.getBytesRef(p, lhsScratch), rhs));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java
index a3b5a743daab8..9dc408311b154 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsEvaluator.java
@@ -35,10 +35,10 @@ public final class InsensitiveEqualsEvaluator implements EvalOperator.Expression
 
   public InsensitiveEqualsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(InsensitiveEquals.process(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, InsensitiveEquals.process(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java
index 4ad14555619f7..922c95b2bb550 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEv
 
   public LessThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, LessThan.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java
index f54a4dea0a1dc..f8d7b716b337e 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanIntsEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvalu
 
   public LessThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock
   }
 
   public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
+        result.appendBoolean(p, LessThan.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java
index 0085bd7f37eda..af31709cc9576 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanKeywordsEvaluator.java
@@ -35,10 +35,10 @@ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionE
 
   public LessThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, LessThan.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java
index bf0241d05c500..8911398202ceb 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanLongsEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEval
 
   public LessThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo
   }
 
   public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
+        result.appendBoolean(p, LessThan.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java
index 4ff3a3fb65b14..ea2097bead16c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.Expre
 
   public LessThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, LessThanOrEqual.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java
index 00afea476db1d..01a46e011d344 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.Expressi
 
   public LessThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock
   }
 
   public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
+        result.appendBoolean(p, LessThanOrEqual.processInts(lhsVector.getInt(p), rhsVector.getInt(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java
index 460b0ed07d6da..d30033733130c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java
@@ -35,10 +35,10 @@ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.Expr
 
   public LessThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc
   }
 
   public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       BytesRef lhsScratch = new BytesRef();
       BytesRef rhsScratch = new BytesRef();
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
+        result.appendBoolean(p, LessThanOrEqual.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java
index 5a293be95484d..3c1a03006a843 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java
@@ -34,10 +34,10 @@ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.Express
 
   public LessThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo
   }
 
   public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(LessThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
+        result.appendBoolean(p, LessThanOrEqual.processLongs(lhsVector.getLong(p), rhsVector.getLong(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java
index 29af62dbbc8ce..0dc80fdbcf16b 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsBoolsEvaluator.java
@@ -32,10 +32,10 @@ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEva
 
   public NotEqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -87,9 +87,9 @@ public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock
   }
 
   public BooleanVector eval(int positionCount, BooleanVector lhsVector, BooleanVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(NotEquals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p)));
+        result.appendBoolean(p, NotEquals.processBools(lhsVector.getBoolean(p), rhsVector.getBoolean(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java
index d24ff131d21e0..f439ec0e94d9c 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsDoublesEvaluator.java
@@ -34,10 +34,10 @@ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionE
 
   public NotEqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
       EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
-    this.warnings = new Warnings(source);
     this.lhs = lhs;
     this.rhs = rhs;
     this.driverContext = driverContext;
+    this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
   }
 
   @Override
@@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rh
   }
 
   public BooleanVector eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) {
-    try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) {
+    try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
       position: for (int p = 0; p < positionCount; p++) {
-        result.appendBoolean(NotEquals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
+        result.appendBoolean(p, NotEquals.processDoubles(lhsVector.getDouble(p), rhsVector.getDouble(p)));
       }
       return result.build();
     }
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java
index abc0323722e23..7553a5667f4a2 100644
--- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java
+++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsGeometriesEvaluator.java
@@ -35,10 +35,10 @@ public final class NotEqualsGeometriesEvaluator implements EvalOperator.Expressi
 
   public NotEqualsGeometriesEvaluator(Source source,
EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, NotEquals.processGeometries(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 23b49df95aa66..19098d89be46e 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -34,10 +34,10 @@ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEval public NotEqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); + result.appendBoolean(p, NotEquals.processInts(lhsVector.getInt(p), rhsVector.getInt(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index e4980ffeac829..1246745404756 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -35,10 +35,10 @@ public 
final class NotEqualsKeywordsEvaluator implements EvalOperator.Expression public NotEqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -92,11 +92,11 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc } public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); + result.appendBoolean(p, NotEquals.processKeywords(lhsVector.getBytesRef(p, lhsScratch), rhsVector.getBytesRef(p, rhsScratch))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java index e6dc060bf837d..25f95af9266ef 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -34,10 +34,10 @@ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEva public NotEqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { - this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; + this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -89,9 +89,9 @@ public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlo } public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) { - try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(NotEquals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); + result.appendBoolean(p, NotEquals.processLongs(lhsVector.getLong(p), rhsVector.getLong(p))); } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/Column.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/Column.java index 1495c90dc67bf..a19dafba1559b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/Column.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/Column.java @@ -14,14 +14,19 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.type.DataType; 
+import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; /** - * A column of data provided in the request. + * A "column" from a {@code table} provided in the request. */ public record Column(DataType type, Block values) implements Releasable, Writeable { + public Column { + assert PlannerUtils.toElementType(type) == values.elementType(); + } + public Column(BlockStreamInput in) throws IOException { this(EsqlDataTypes.fromTypeName(in.readString()), in.readNamedWriteable(Block.class)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 675b99c61bfbe..75600e64e3e90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -27,6 +27,11 @@ public class EsqlCapabilities { */ private static final String FN_CBRT = "fn_cbrt"; + /** + * Support for {@code MV_APPEND} function. #107001 + */ + private static final String FN_MV_APPEND = "fn_mv_append"; + /** * Support for function {@code IP_PREFIX}. */ @@ -48,12 +53,22 @@ public class EsqlCapabilities { private static final String METADATA_IGNORED_FIELD = "metadata_field_ignored"; /** - * Support for requesting the "LOOKUP" command. + * Support for the "LOOKUP" command. */ - private static final String LOOKUP = "lookup"; + private static final String LOOKUP_COMMAND = "lookup_command"; + + /** + * Support for requesting the "REPEAT" command. + */ + private static final String REPEAT = "repeat"; public static final Set CAPABILITIES = capabilities(); + /** + * Cast string literals to datetime in addition and subtraction when the other side is a date or time interval. 
+ */ + public static final String STRING_LITERAL_AUTO_CASTING_TO_DATETIME_ADD_SUB = "string_literal_auto_casting_to_datetime_add_sub"; + private static Set capabilities() { List caps = new ArrayList<>(); caps.add(FN_CBRT); @@ -61,9 +76,11 @@ private static Set capabilities() { caps.add(FN_SUBSTRING_EMPTY_NULL); caps.add(ST_CENTROID_AGG_OPTIMIZED); caps.add(METADATA_IGNORED_FIELD); + caps.add(FN_MV_APPEND); + caps.add(REPEAT); if (Build.current().isSnapshot()) { - caps.add(LOOKUP); + caps.add(LOOKUP_COMMAND); } /* @@ -75,6 +92,7 @@ private static Set capabilities() { for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) { caps.add(cap(feature)); } + caps.add(STRING_LITERAL_AUTO_CASTING_TO_DATETIME_ADD_SUB); return Set.copyOf(caps); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ParseTables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ParseTables.java index 1c607e6446f4c..97728196a0ced 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ParseTables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ParseTables.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.core.Releasables; @@ -21,18 +22,17 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.io.IOException; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; -import java.util.TreeMap; /** * Parses the {@code tables} request body parameter. */ public class ParseTables { - public static final Set SUPPORTED_TYPES = Set.of(DataTypes.INTEGER, DataTypes.KEYWORD, DataTypes.LONG); + public static final Set SUPPORTED_TYPES = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.LONG); private static final int MAX_LENGTH = (int) ByteSizeValue.ofMb(1).getBytes(); private final BlockFactory blockFactory; @@ -70,7 +70,7 @@ void parseTables() throws IOException { * so we can be sure we'll always have a type. 
*/ private Map parseTable() throws IOException { - Map columns = new TreeMap<>(); + Map columns = new LinkedHashMap<>(); boolean success = false; try { if (p.currentToken() != XContentParser.Token.START_OBJECT) { @@ -113,6 +113,7 @@ private Column parseColumn(String type) throws IOException { case "integer" -> parseIntColumn(); case "keyword" -> parseKeywordColumn(); case "long" -> parseLongColumn(); + case "double" -> parseDoubleColumn(); default -> throw new XContentParseException(p.getTokenLocation(), "unsupported type [" + type + "]"); }; } @@ -127,7 +128,7 @@ private Column parseKeywordColumn() throws IOException { while (true) { switch (p.nextToken()) { case END_ARRAY -> { - return new Column(DataTypes.KEYWORD, builder.build()); + return new Column(DataType.KEYWORD, builder.build()); } case START_ARRAY -> parseTextArray(builder, scratch); case VALUE_NULL -> builder.appendNull(); @@ -172,7 +173,7 @@ private Column parseIntColumn() throws IOException { while (true) { switch (p.nextToken()) { case END_ARRAY -> { - return new Column(DataTypes.INTEGER, builder.build()); + return new Column(DataType.INTEGER, builder.build()); } case START_ARRAY -> parseIntArray(builder); case VALUE_NULL -> builder.appendNull(); @@ -214,7 +215,7 @@ private Column parseLongColumn() throws IOException { while (true) { switch (p.nextToken()) { case END_ARRAY -> { - return new Column(DataTypes.LONG, builder.build()); + return new Column(DataType.LONG, builder.build()); } case START_ARRAY -> parseLongArray(builder); case VALUE_NULL -> builder.appendNull(); @@ -246,4 +247,46 @@ private void appendLong(LongBlock.Builder builder) throws IOException { } builder.appendLong(p.longValue()); } + + private Column parseDoubleColumn() throws IOException { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(100)) { // TODO 100?! 
+ XContentParser.Token token = p.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new XContentParseException(p.getTokenLocation(), "expected " + XContentParser.Token.START_ARRAY); + } + while (true) { + switch (p.nextToken()) { + case END_ARRAY -> { + return new Column(DataType.DOUBLE, builder.build()); + } + case START_ARRAY -> parseDoubleArray(builder); + case VALUE_NULL -> builder.appendNull(); + case VALUE_NUMBER, VALUE_STRING -> appendDouble(builder); + default -> throw new XContentParseException(p.getTokenLocation(), "expected number, array of numbers, or null"); + } + } + } + } + + private void parseDoubleArray(DoubleBlock.Builder builder) throws IOException { + builder.beginPositionEntry(); + while (true) { + switch (p.nextToken()) { + case END_ARRAY -> { + builder.endPositionEntry(); + return; + } + case VALUE_NUMBER, VALUE_STRING -> appendDouble(builder); + default -> throw new XContentParseException(p.getTokenLocation(), "expected number"); + } + } + } + + private void appendDouble(DoubleBlock.Builder builder) throws IOException { + length += Double.BYTES; + if (length > MAX_LENGTH) { + throw new XContentParseException(p.getTokenLocation(), "tables too big"); + } + builder.appendDouble(p.doubleValue()); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 9ffd48d9d0c3b..793f453d5ebf5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -46,7 +46,6 @@ final class RequestXContent { PARAM_PARSER.declareString(constructorArg(), TYPE); } - static final ParseField ESQL_VERSION_FIELD = new ParseField("version"); static final ParseField QUERY_FIELD = new ParseField("query"); private static final ParseField COLUMNAR_FIELD = new ParseField("columnar"); private static final ParseField FILTER_FIELD = new ParseField("filter"); @@ -74,7 +73,6 @@ static EsqlQueryRequest parseAsync(XContentParser parser) { } private static void objectParserCommon(ObjectParser parser) { - parser.declareString((str, consumer) -> {}, ESQL_VERSION_FIELD); parser.declareString(EsqlQueryRequest::query, QUERY_FIELD); parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index b95afe2fadeea..70fbe17a7d470 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,7 +9,9 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.analyzer.AnalyzerRules; @@ -46,7 +48,6 @@ import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.type.UnsupportedEsField; @@ -58,6 +59,7 @@ import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.DateTimeArithmeticOperation; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -68,10 +70,13 @@ import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -94,31 +99,39 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NESTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.NESTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; public class Analyzer extends ParameterizedRuleExecutor { // marker list of attributes for plans that do not have any concrete fields to return, but have other computed columns to return // ie from test | stats c = count(*) public static final List NO_FIELDS = List.of( - new ReferenceAttribute(Source.EMPTY, "", DataTypes.NULL, null, Nullability.TRUE, null, true) + new ReferenceAttribute(Source.EMPTY, "", DataType.NULL, null, Nullability.TRUE, null, true) ); private static final Iterable> rules; static { - var init = new Batch<>("Initialize", Limiter.ONCE, new ResolveTable(), new ResolveEnrich(), new ResolveFunctions()); + var init = new Batch<>( + "Initialize", + Limiter.ONCE, + new ResolveTable(), + new ResolveEnrich(), + new ResolveLookupTables(), + new ResolveFunctions() + ); var resolution = new Batch<>("Resolution", new ResolveRefs(), new ImplicitCasting()); var finish = new Batch<>("Finish Analysis", Limiter.ONCE, new AddImplicitLimit()); rules = List.of(init, resolution, finish); @@ -316,10 +329,60 @@ private static NamedExpression createEnrichFieldExpression( } } - private static class ResolveRefs extends BaseAnalyzerRule { + private static class ResolveLookupTables extends ParameterizedAnalyzerRule { + @Override + protected LogicalPlan rule(Lookup lookup, AnalyzerContext context) { + // the parser passes the string wrapped in a literal + Source source = lookup.source(); + Expression tableNameExpression = lookup.tableName(); + String tableName = lookup.tableName().toString(); + Map> tables = context.configuration().tables(); + LocalRelation localRelation = null; + + if (tables.containsKey(tableName) == false) { + String message = "Unknown table [" + tableName + "]"; + // typos check + List potentialMatches = StringUtils.findSimilar(tableName, tables.keySet()); + if (CollectionUtils.isEmpty(potentialMatches) == false) { + message = UnresolvedAttribute.errorMessage(tableName, potentialMatches).replace("column", "table"); + } + tableNameExpression = new UnresolvedAttribute(tableNameExpression.source(), tableName, null, message); + } + // wrap the table in a local relationship for idiomatic field resolution + else { + localRelation = tableMapAsRelation(source, tables.get(tableName)); + // postpone the resolution for ResolveRefs + } + + return new Lookup(source, lookup.child(), tableNameExpression, lookup.matchFields(), localRelation); + } + + private LocalRelation tableMapAsRelation(Source source, Map mapTable) { + Block[] blocks = new Block[mapTable.size()]; + + List attributes = new ArrayList<>(blocks.length); + int i = 0; + for (Map.Entry entry : mapTable.entrySet()) { + String name = entry.getKey(); + Column column = entry.getValue(); + // create a fake ES field - alternative is to use a ReferenceAttribute + EsField field = new EsField(name, column.type(), Map.of(), false, false); + attributes.add(new FieldAttribute(source, null, name, field)); + // prepare the block for the supplier + blocks[i++] = column.values(); + } + LocalSupplier supplier = LocalSupplier.of(blocks); + return new LocalRelation(source, attributes, supplier); + } + } + + private static class ResolveRefs 
extends BaseAnalyzerRule { @Override protected LogicalPlan doRule(LogicalPlan plan) { + if (plan.childrenResolved() == false) { + return plan; + } final List childrenOutput = new ArrayList<>(); for (LogicalPlan child : plan.children()) { @@ -355,6 +418,10 @@ protected LogicalPlan doRule(LogicalPlan plan) { return resolveMvExpand(p, childrenOutput); } + if (plan instanceof Lookup l) { + return resolveLookup(l, childrenOutput); + } + return plan.transformExpressionsOnly(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); } @@ -437,6 +504,70 @@ private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) return p; } + private LogicalPlan resolveLookup(Lookup l, List childrenOutput) { + // check if the table exists before performing any resolution + if (l.localRelation() == null) { + return l; + } + + // check the on field against both the child output and the inner relation + List matchFields = new ArrayList<>(l.matchFields().size()); + List localOutput = l.localRelation().output(); + boolean modified = false; + + for (NamedExpression ne : l.matchFields()) { + NamedExpression matchFieldChildReference = ne; + if (ne instanceof UnresolvedAttribute ua && ua.customMessage() == false) { + modified = true; + Attribute joinedAttribute = maybeResolveAttribute(ua, localOutput); + // can't find the field inside the local relation + if (joinedAttribute instanceof UnresolvedAttribute lua) { + // adjust message + matchFieldChildReference = lua.withUnresolvedMessage( + lua.unresolvedMessage().replace("Unknown column", "Unknown column in lookup target") + ); + } else { + // check also the child output by resolving to it + Attribute attr = maybeResolveAttribute(ua, childrenOutput); + matchFieldChildReference = attr; + if (attr instanceof UnresolvedAttribute == false) { + /* + * If they do, make sure the data types line up. If either is + * null it's fine to match it against anything. 
+ */ + boolean dataTypesOk = joinedAttribute.dataType().equals(attr.dataType()); + if (false == dataTypesOk) { + dataTypesOk = joinedAttribute.dataType() == DataType.NULL || attr.dataType() == DataType.NULL; + } + if (false == dataTypesOk) { + dataTypesOk = joinedAttribute.dataType().equals(KEYWORD) && attr.dataType().equals(TEXT); + } + if (false == dataTypesOk) { + matchFieldChildReference = new UnresolvedAttribute( + attr.source(), + attr.name(), + attr.qualifier(), + attr.id(), + "column type mismatch, table column was [" + + joinedAttribute.dataType().typeName() + + "] and original column was [" + + attr.dataType().typeName() + + "]", + null + ); + } + } + } + } + + matchFields.add(matchFieldChildReference); + } + if (modified) { + return new Lookup(l.source(), l.child(), l.tableName(), matchFields, l.localRelation()); + } + return l; + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List childrenOutput) { if (ua.customMessage()) { return ua; @@ -777,7 +908,7 @@ public LogicalPlan apply(LogicalPlan logicalPlan, AnalyzerContext context) { limit = context.configuration().resultTruncationMaxSize(); // user provided a limit: cap result entries to the max } var source = logicalPlan.source(); - return new Limit(source, new Literal(source, limit, DataTypes.INTEGER), logicalPlan); + return new Limit(source, new Literal(source, limit, DataType.INTEGER), logicalPlan); } } @@ -820,7 +951,7 @@ private static Expression processScalarFunction(EsqlScalarFunction f, EsqlFuncti } List newChildren = new ArrayList<>(args.size()); boolean childrenChanged = false; - DataType targetDataType = DataTypes.NULL; + DataType targetDataType = DataType.NULL; Expression arg; for (int i = 0; i < args.size(); i++) { arg = args.get(i); @@ -828,7 +959,7 @@ private static Expression processScalarFunction(EsqlScalarFunction f, EsqlFuncti if (i < targetDataTypes.size()) { targetDataType = targetDataTypes.get(i); } - if (targetDataType != DataTypes.NULL && targetDataType != DataTypes.UNSUPPORTED) { + if (targetDataType != DataType.NULL && targetDataType != DataType.UNSUPPORTED) { Expression e = castStringLiteral(arg, targetDataType); childrenChanged = true; newChildren.add(e); @@ -848,22 +979,26 @@ private static Expression processBinaryOperator(BinaryOperator o) { } List newChildren = new ArrayList<>(2); boolean childrenChanged = false; - DataType targetDataType = DataTypes.NULL; + DataType targetDataType = DataType.NULL; Expression from = Literal.NULL; - if (left.dataType() == KEYWORD - && left.foldable() - && (supportsImplicitCasting(right.dataType())) - && ((left instanceof EsqlScalarFunction) == false)) { - targetDataType = right.dataType(); - from = left; + if (left.dataType() == KEYWORD && left.foldable() && (left instanceof EsqlScalarFunction == false)) { + if (supportsImplicitCasting(right.dataType())) { + targetDataType = right.dataType(); + from = left; + } else if (supportsImplicitTemporalCasting(right, o)) { + targetDataType = DATETIME; + from = left; + } } - if (right.dataType() == KEYWORD - && right.foldable() - && (supportsImplicitCasting(left.dataType())) - && ((right instanceof EsqlScalarFunction) == false)) { - targetDataType = left.dataType(); - from = right; + if (right.dataType() == KEYWORD && right.foldable() && (right instanceof EsqlScalarFunction == false)) { + if (supportsImplicitCasting(left.dataType())) { + targetDataType = left.dataType(); + from = right; + } else if (supportsImplicitTemporalCasting(left, o)) { + targetDataType = DATETIME; + from = right; + } } if (from != 
Literal.NULL) { Expression e = castStringLiteral(from, targetDataType); @@ -897,6 +1032,10 @@ private static Expression processIn(In in) { return childrenChanged ? in.replaceChildren(newChildren) : in; } + private static boolean supportsImplicitTemporalCasting(Expression e, BinaryOperator o) { + return isTemporalAmount(e.dataType()) && (o instanceof DateTimeArithmeticOperation); + } + private static boolean supportsImplicitCasting(DataType type) { return type == DATETIME || type == IP || type == VERSION || type == BOOLEAN; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 3f6e387e661f1..367ba682274c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; @@ -125,7 +125,21 @@ else if (p.resolved()) { var aggs = agg.aggregates(); int size = aggs.size() - groupings.size(); aggs.subList(0, size).forEach(unresolvedExpressions); - } else { + } + // similar approach for Lookup + else if (p instanceof Lookup lookup) { + // first check the table + var tableName = lookup.tableName(); + if (tableName instanceof Unresolvable u) { + failures.add(fail(tableName, u.unresolvedMessage())); + } + // only after that check the match fields + else { + lookup.matchFields().forEach(unresolvedExpressions); + } + } + + else { p.forEachExpression(unresolvedExpressions); } }); @@ -378,17 +392,17 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { } List allowed = new ArrayList<>(); - allowed.add(DataTypes.KEYWORD); - allowed.add(DataTypes.TEXT); - allowed.add(DataTypes.IP); - allowed.add(DataTypes.DATETIME); - allowed.add(DataTypes.VERSION); - allowed.add(DataTypes.GEO_POINT); - allowed.add(DataTypes.GEO_SHAPE); - allowed.add(DataTypes.CARTESIAN_POINT); - allowed.add(DataTypes.CARTESIAN_SHAPE); + allowed.add(DataType.KEYWORD); + allowed.add(DataType.TEXT); + allowed.add(DataType.IP); + allowed.add(DataType.DATETIME); + allowed.add(DataType.VERSION); + allowed.add(DataType.GEO_POINT); + allowed.add(DataType.GEO_SHAPE); + allowed.add(DataType.CARTESIAN_POINT); + allowed.add(DataType.CARTESIAN_SHAPE); if (bc instanceof Equals || bc instanceof NotEquals) { - allowed.add(DataTypes.BOOLEAN); + allowed.add(DataType.BOOLEAN); } Expression.TypeResolution r = TypeResolutions.isType( bc.left(), @@ -400,7 +414,7 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { if (false == r.resolved()) { return 
fail(bc, r.message()); } - if (DataTypes.isString(bc.left().dataType()) && DataTypes.isString(bc.right().dataType())) { + if (DataType.isString(bc.left().dataType()) && DataType.isString(bc.right().dataType())) { return null; } if (bc.left().dataType() != bc.right().dataType()) { @@ -427,15 +441,15 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { public static Failure validateUnsignedLongOperator(BinaryOperator bo) { DataType leftType = bo.left().dataType(); DataType rightType = bo.right().dataType(); - if ((leftType == DataTypes.UNSIGNED_LONG || rightType == DataTypes.UNSIGNED_LONG) && leftType != rightType) { + if ((leftType == DataType.UNSIGNED_LONG || rightType == DataType.UNSIGNED_LONG) && leftType != rightType) { return fail( bo, "first argument of [{}] is [{}] and second is [{}]. [{}] can only be operated on together with another [{}]", bo.sourceText(), leftType.typeName(), rightType.typeName(), - DataTypes.UNSIGNED_LONG.typeName(), - DataTypes.UNSIGNED_LONG.typeName() + DataType.UNSIGNED_LONG.typeName(), + DataType.UNSIGNED_LONG.typeName() ); } return null; @@ -446,7 +460,7 @@ public static Failure validateUnsignedLongOperator(BinaryOperator bo */ private static Failure validateUnsignedLongNegation(Neg neg) { DataType childExpressionType = neg.field().dataType(); - if (childExpressionType.equals(DataTypes.UNSIGNED_LONG)) { + if (childExpressionType.equals(DataType.UNSIGNED_LONG)) { return fail( neg, "negation unsupported for arguments of type [{}] in expression [{}]", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 69d988c958169..05b78c8b5f309 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -96,9 +96,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; - /** * {@link EnrichLookupService} performs enrich lookup for a given input page. The lookup process consists of three stages: * - Stage 1: Finding matching document IDs for the input page. This stage is done by the {@link EnrichQuerySourceOperator} or its variants. 
@@ -460,7 +457,7 @@ private static class LookupRequest extends TransportRequest implements IndicesRe } this.toRelease = inputPage; PlanStreamInput planIn = new PlanStreamInput(in, PlanNameRegistry.INSTANCE, in.namedWriteableRegistry(), null); - this.extractFields = planIn.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readNamedExpression)); + this.extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); } @Override @@ -475,7 +472,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(matchField); out.writeWriteable(inputPage); PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, null); - planOut.writeCollection(extractFields, writerFromPlanWriter(PlanStreamOutput::writeNamedExpression)); + planOut.writeNamedWriteableCollection(extractFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java index 929206ed58897..923b75055ca9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java @@ -33,8 +33,8 @@ import java.util.List; import java.util.function.IntFunction; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; /** * Generates a list of Lucene queries based on the input block. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index a788a7abcc8cd..c8074d29e0576 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -127,7 +127,7 @@ private Block eval(BooleanVector lhs, BooleanVector rhs) { int positionCount = lhs.getPositionCount(); try (var result = lhs.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - result.appendBoolean(bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p))); + result.appendBoolean(p, bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p))); } return result.build().asBlock(); } @@ -264,7 +264,7 @@ public Block eval(Page page) { } try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { - builder.appendBoolean(fieldBlock.isNull(p)); + builder.appendBoolean(p, fieldBlock.isNull(p)); } return builder.build().asBlock(); } @@ -313,7 +313,7 @@ public Block eval(Page page) { } try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { - builder.appendBoolean(fieldBlock.isNull(p) == false); + builder.appendBoolean(p, fieldBlock.isNull(p) == false); } return builder.build().asBlock(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index 095d2ebe2a4bd..5888e30747557 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -7,11 +7,7 @@ package org.elasticsearch.xpack.esql.evaluator.mapper; -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.breaker.NoopCircuitBreaker; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -71,12 +67,6 @@ public Block eval(Page page) { @Override public void close() {} - }).get( - new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - // TODO maybe this should have a small fixed limit? - new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) - ) - ).eval(new Page(1)), 0); + }).get(DriverContext.getLocalDriver()).eval(new Page(1)), 0); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index 1e2e58886792a..8f7fcef0ff07e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -20,10 +19,10 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; public class EsqlTypeResolutions { @@ -38,7 +37,7 @@ public static Expression.TypeResolution isStringAndExact(Expression e, String op public static Expression.TypeResolution isExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { if (e instanceof FieldAttribute fa) { - if (DataTypes.isString(fa.dataType())) { + if (DataType.isString(fa.dataType())) { // ESQL can extract exact values for TEXT fields return Expression.TypeResolution.TYPE_RESOLVED; } @@ -65,7 +64,7 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa GEO_SHAPE.typeName(), CARTESIAN_SHAPE.typeName() }; private static final String[] POINT_TYPE_NAMES = new 
String[] { GEO_POINT.typeName(), CARTESIAN_POINT.typeName() }; - private static final String[] NON_SPATIAL_TYPE_NAMES = DataTypes.types() + private static final String[] NON_SPATIAL_TYPE_NAMES = DataType.types() .stream() .filter(EsqlDataTypes::isRepresentable) .filter(t -> EsqlDataTypes.isSpatial(t) == false) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/NamedExpressions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/NamedExpressions.java index cb6aaf879f3cb..d0c8adfd3c858 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/NamedExpressions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/NamedExpressions.java @@ -17,7 +17,6 @@ import java.util.Map; public class NamedExpressions { - /** * Calculates the actual output of a command given the new attributes plus the existing inputs that are emitted as outputs * @param fields the fields added by the command diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java index fa66f299b2272..10800a2394e8f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.List; @@ -21,7 +21,7 @@ public Order(Source source, Expression child, OrderDirection direction, NullsPos @Override protected TypeResolution resolveType() { - if (DataTypes.isString(child().dataType())) { + if (DataType.isString(child().dataType())) { return TypeResolution.TYPE_RESOLVED; } return super.resolveType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/UnresolvedNamePattern.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/UnresolvedNamePattern.java index 7df28f0648318..98282b5dec0eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/UnresolvedNamePattern.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/UnresolvedNamePattern.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.expression; import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -16,6 +17,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import java.io.IOException; import java.util.List; import java.util.Objects; @@ -42,6 +44,16 @@ public UnresolvedNamePattern(Source source, CharacterRunAutomaton automaton, Str this.name = name; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("doesn't escape the node"); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException("doesn't escape the node"); + } + public boolean match(String 
string) { return automaton.run(string); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index a8ab961fc201a..8fd6ebe8d7d69 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; @@ -76,6 +75,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppend; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvConcat; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -103,6 +103,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; @@ -123,21 +124,21 @@ import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; public final class EsqlFunctionRegistry extends FunctionRegistry { @@ -234,7 +235,8 @@ private FunctionDefinition[][] functions() { def(EndsWith.class, EndsWith::new, "ends_with"), def(ToLower.class, ToLower::new, "to_lower"), def(ToUpper.class, ToUpper::new, "to_upper"), - def(Locate.class, Locate::new, "locate") }, + def(Locate.class, Locate::new, "locate"), + def(Repeat.class, Repeat::new, "repeat") }, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), @@ -280,6 +282,7 @@ private FunctionDefinition[][] functions() { def(ToVersion.class, ToVersion::new, "to_version", "to_ver"), }, // multivalue functions new FunctionDefinition[] { + def(MvAppend.class, MvAppend::new, "mv_append"), def(MvAvg.class, MvAvg::new, "mv_avg"), def(MvConcat.class, MvConcat::new, "mv_concat"), def(MvCount.class, MvCount::new, "mv_count"), @@ -374,7 +377,7 @@ public List argDescriptions() { public static DataType getTargetType(String[] names) { List types = new ArrayList<>(); for (String name : names) { - types.add(DataTypes.fromEs(name)); + types.add(DataType.fromEs(name)); } if (types.contains(KEYWORD) || types.contains(TEXT)) { return UNSUPPORTED; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index fe6db916f7a0d..79dcc6a3d3920 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -36,6 +37,11 @@ public final class UnsupportedAttribute extends FieldAttribute implements Unreso "UnsupportedAttribute", UnsupportedAttribute::new ); + public static final NamedWriteableRegistry.Entry NAMED_EXPRESSION_ENTRY = new NamedWriteableRegistry.Entry( + NamedExpression.class, + ENTRY.name, + UnsupportedAttribute::new + ); private final String message; private final boolean hasCustomMessage; // TODO remove me and just use message != null? 
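The generated evaluator constructors earlier in this change all swap new Warnings(source) for Warnings.createWarnings(driverContext.warningsMode(), source), which is also why the warnings assignment now follows the driverContext assignment in each constructor. The Warnings.java hunk below supplies that factory; as a reading aid, here is a minimal, self-contained Java sketch of the null-object pattern it relies on. The names WarningsMode, WarningsSketch, and create are simplified stand-ins for illustration only, not the Elasticsearch classes.

enum WarningsMode { COLLECT, IGNORE }

class WarningsSketch {
    // Shared no-op collector handed out for IGNORE mode, mirroring NOOP_WARNINGS in the hunk below.
    static final WarningsSketch NOOP = new WarningsSketch() {
        @Override
        void registerException(Exception exception) {
            // intentionally dropped
        }
    };

    // Factory keyed off the driver's warnings mode, analogous to Warnings.createWarnings.
    static WarningsSketch create(WarningsMode mode) {
        return switch (mode) {
            case COLLECT -> new WarningsSketch();
            case IGNORE -> NOOP;
        };
    }

    void registerException(Exception exception) {
        System.out.println("warning: " + exception.getMessage());
    }

    public static void main(String[] args) {
        WarningsSketch.create(WarningsMode.COLLECT).registerException(new ArithmeticException("/ by zero")); // prints
        WarningsSketch.create(WarningsMode.IGNORE).registerException(new ArithmeticException("/ by zero")); // silent
    }
}

Because IGNORE hands back one shared instance whose registerException does nothing, the generated evaluators can call it unconditionally in tight per-row loops without paying for warning collection they do not need.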
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Warnings.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Warnings.java index ae1072a0a5bf8..630cf62d0030a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Warnings.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Warnings.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.tree.Source; import static org.elasticsearch.common.logging.HeaderWarning.addWarning; @@ -23,6 +24,31 @@ public class Warnings { private int addedWarnings; + public static final Warnings NOOP_WARNINGS = new Warnings(Source.EMPTY) { + @Override + public void registerException(Exception exception) { + // this space intentionally left blank + } + }; + + /** + * Create a new warnings object based on the given mode. + * @param warningsMode The warnings collection strategy to use + * @param source used to indicate where in the query the warning occurred + * @return A warnings collector object + */ + public static Warnings createWarnings(DriverContext.WarningsMode warningsMode, Source source) { + switch (warningsMode) { + case COLLECT -> { + return new Warnings(source); + } + case IGNORE -> { + return NOOP_WARNINGS; + } + } + throw new IllegalStateException("Unreachable"); + } + public Warnings(Source source) { location = format("Line {}:{}: ", source.source().getLineNumber(), source.source().getColumnNumber()); first = format(
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 39d73dd82e39c..ee75980e10264 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -34,7 +33,7 @@ public Avg(Source source, @Param(name = "number", type = { "double", "integer", protected Expression.TypeResolution resolveType() { return isType( field(), - dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), DEFAULT, "numeric except unsigned_long or counter types" @@ -43,7 +42,7 @@ protected Expression.TypeResolution resolveType() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override
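The createWarnings factory added above gives evaluators a mode-dependent collector: COLLECT produces a real Warnings instance, while IGNORE hands back the shared no-op. A minimal sketch of the intended call pattern, assuming the DriverContext imported above exposes its WarningsMode via an accessor; the evaluator class itself is hypothetical:

    import org.elasticsearch.compute.operator.DriverContext;
    import org.elasticsearch.xpack.esql.core.tree.Source;
    import org.elasticsearch.xpack.esql.expression.function.Warnings;

    // Hypothetical evaluator fragment; only the Warnings wiring matters here.
    class DivideEvaluator {
        private final Warnings warnings;

        DivideEvaluator(Source source, DriverContext driverContext) {
            // Under IGNORE this is NOOP_WARNINGS, whose registerException does nothing,
            // so evaluators pay nothing for warnings nobody will read.
            this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source);
        }

        Long eval(long numerator, long denominator) {
            try {
                return numerator / denominator;
            } catch (ArithmeticException e) {
                warnings.registerException(e); // collected as a header warning under COLLECT
                return null; // failed positions surface as null in ES|QL
            }
        }
    }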
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index 73bbfba544bd9..d55bc9d618c39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -75,7 +74,7 @@ public String innerName() { @Override public DataType dataType() { - return DataTypes.LONG; + return DataType.LONG; } @Override @@ -103,7 +102,7 @@ public Expression surrogate() { if (l.value() != null && (l.value() instanceof List) == false) { // TODO: Normalize COUNT(*), COUNT(), COUNT("foobar"), COUNT(1) as COUNT(*). // Does not apply to COUNT([1,2,3]) - // return new Count(s, new Literal(s, StringUtils.WILDCARD, DataTypes.KEYWORD)); + // return new Count(s, new Literal(s, StringUtils.WILDCARD, DataType.KEYWORD)); return null; } } @@ -111,8 +110,8 @@ public Expression surrogate() { // COUNT(const) is equivalent to MV_COUNT(const)*COUNT(*) if const is not null; otherwise COUNT(const) == 0. return new Mul( s, - new Coalesce(s, new MvCount(s, field), List.of(new Literal(s, 0, DataTypes.INTEGER))), - new Count(s, new Literal(s, StringUtils.WILDCARD, DataTypes.KEYWORD)) + new Coalesce(s, new MvCount(s, field), List.of(new Literal(s, 0, DataType.INTEGER))), + new Count(s, new Literal(s, StringUtils.WILDCARD, DataType.KEYWORD)) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 481f025399842..c91b9c37ae0a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -69,7 +68,7 @@ public CountDistinct replaceChildren(List newChildren) { @Override public DataType dataType() { - return DataTypes.LONG; + return DataType.LONG; } @Override @@ -86,7 +85,7 @@ protected TypeResolution resolveType() { boolean resolved = resolution.resolved(); resolution = isType( field(), - dt -> resolved && dt != DataTypes.UNSIGNED_LONG, + dt -> resolved && dt != DataType.UNSIGNED_LONG, sourceText(), DEFAULT, "any exact type except unsigned_long or counter types" @@ -101,20 +100,20 @@ protected TypeResolution resolveType() { public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); int precision = this.precision == null ?
DEFAULT_PRECISION : ((Number) this.precision.fold()).intValue(); - if (type == DataTypes.BOOLEAN) { + if (type == DataType.BOOLEAN) { // Booleans ignore the precision because there are only two possible values anyway return new CountDistinctBooleanAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.DATETIME || type == DataTypes.LONG) { + if (type == DataType.DATETIME || type == DataType.LONG) { return new CountDistinctLongAggregatorFunctionSupplier(inputChannels, precision); } - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { return new CountDistinctIntAggregatorFunctionSupplier(inputChannels, precision); } - if (type == DataTypes.DOUBLE) { + if (type == DataType.DOUBLE) { return new CountDistinctDoubleAggregatorFunctionSupplier(inputChannels, precision); } - if (type == DataTypes.KEYWORD || type == DataTypes.IP || type == DataTypes.VERSION || type == DataTypes.TEXT) { + if (type == DataType.KEYWORD || type == DataType.IP || type == DataType.VERSION || type == DataType.TEXT) { return new CountDistinctBytesRefAggregatorFunctionSupplier(inputChannels, precision); } throw EsqlIllegalArgumentException.illegalDataType(type); @@ -126,7 +125,7 @@ public Expression surrogate() { var field = field(); return field.foldable() - ? new ToLong(s, new Coalesce(s, new MvCount(s, new MvDedupe(s, field)), List.of(new Literal(s, 0, DataTypes.INTEGER)))) + ? new ToLong(s, new Coalesce(s, new MvCount(s, new MvDedupe(s, field)), List.of(new Literal(s, 0, DataType.INTEGER)))) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index 7c48a58ddb649..c381693dbe2ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -40,7 +39,7 @@ public Median(Source source, @Param(name = "number", type = { "double", "integer protected Expression.TypeResolution resolveType() { return isType( field(), - dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), DEFAULT, "numeric except unsigned_long or counter types" @@ -49,7 +48,7 @@ protected Expression.TypeResolution resolveType() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override @@ -69,6 +68,6 @@ public Expression surrogate() { return field.foldable() ? 
new MvMedian(s, new ToDouble(s, field)) - : new Percentile(source(), field(), new Literal(source(), (int) QuantileStates.MEDIAN, DataTypes.INTEGER)); + : new Percentile(source(), field(), new Literal(source(), (int) QuantileStates.MEDIAN, DataType.INTEGER)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java index 7f77f9cbb8ccb..b003b981c0709 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/NumericAggregate.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.util.List; @@ -35,7 +34,7 @@ protected TypeResolution resolveType() { if (supportsDates()) { return TypeResolutions.isType( this, - e -> e == DataTypes.DATETIME || e.isNumeric() && e != DataTypes.UNSIGNED_LONG, + e -> e == DataType.DATETIME || e.isNumeric() && e != DataType.UNSIGNED_LONG, sourceText(), DEFAULT, "datetime", @@ -44,7 +43,7 @@ protected TypeResolution resolveType() { } return isType( field(), - dt -> dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), DEFAULT, "numeric except unsigned_long or counter types" @@ -57,22 +56,22 @@ protected boolean supportsDates() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override public final AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (supportsDates() && type == DataTypes.DATETIME) { + if (supportsDates() && type == DataType.DATETIME) { return longSupplier(inputChannels); } - if (type == DataTypes.LONG) { + if (type == DataType.LONG) { return longSupplier(inputChannels); } - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { return intSupplier(inputChannels); } - if (type == DataTypes.DOUBLE) { + if (type == DataType.DOUBLE) { return doubleSupplier(inputChannels); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index b06053199c5ab..d21247a77d9cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -65,7 +65,7 @@ protected TypeResolution resolveType() { TypeResolution resolution = isType( field(), - dt -> 
dt.isNumeric() && dt != DataTypes.UNSIGNED_LONG, + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), FIRST, "numeric except unsigned_long" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index 0c5db63209b33..418f92284cca0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.ToAggregator; @@ -72,18 +71,18 @@ public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); if (useDocValues) { // When the points are read as doc-values (eg. from the index), feed them into the doc-values aggregator - if (type == DataTypes.GEO_POINT) { + if (type == DataType.GEO_POINT) { return new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.CARTESIAN_POINT) { + if (type == DataType.CARTESIAN_POINT) { return new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); } } else { // When the points are read as WKB from source or as point literals, feed them into the source-values aggregator - if (type == DataTypes.GEO_POINT) { + if (type == DataType.GEO_POINT) { return new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.CARTESIAN_POINT) { + if (type == DataType.CARTESIAN_POINT) { return new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 78ad04414cd57..be9ae295f6fbc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -25,9 +24,9 @@ import java.util.List; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; /** * Sum all values of a field in matching documents. @@ -77,7 +76,7 @@ public Expression surrogate() { // SUM(const) is equivalent to MV_SUM(const)*COUNT(*). return field.foldable() - ? new Mul(s, new MvSum(s, field), new Count(s, new Literal(s, StringUtils.WILDCARD, DataTypes.KEYWORD))) + ? new Mul(s, new MvSum(s, field), new Count(s, new Literal(s, StringUtils.WILDCARD, DataType.KEYWORD))) : null; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 1723027a8e8e0..c76f60fe0f555 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -64,19 +63,19 @@ protected TypeResolution resolveType() { @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { return new ValuesIntAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.LONG || type == DataTypes.DATETIME) { + if (type == DataType.LONG || type == DataType.DATETIME) { return new ValuesLongAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.DOUBLE) { + if (type == DataType.DOUBLE) { return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); } - if (DataTypes.isString(type) || type == DataTypes.IP || type == DataTypes.VERSION) { + if (DataType.isString(type) || type == DataType.IP || type == DataType.VERSION) { return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.BOOLEAN) { + if (type == DataType.BOOLEAN) { return new ValuesBooleanAggregatorFunctionSupplier(inputChannels); } // TODO cartesian_point, geo_point diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 6e8341614a973..431494534f4ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -201,7 +200,7 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - if (field.dataType() == DataTypes.DATETIME) { + if 
(field.dataType() == DataType.DATETIME) { Rounding.Prepared preparedRounding; if (buckets.dataType().isInteger()) { int b = ((Number) buckets.fold()).intValue(); @@ -225,7 +224,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function dt.isInteger() || EsqlDataTypes.isTemporalAmount(dt), @@ -340,7 +339,7 @@ private TypeResolution checkArgsCount(int expectedCount) { private static TypeResolution isStringOrDate(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return TypeResolutions.isType( e, - exp -> DataTypes.isString(exp) || DataTypes.isDateTime(exp), + exp -> DataType.isString(exp) || DataType.isDateTime(exp), operationName, paramOrd, "datetime", @@ -359,13 +358,13 @@ public void validate(Failures failures) { private long foldToLong(Expression e) { Object value = Foldables.valueOf(e); - return DataTypes.isDateTime(e.dataType()) ? ((Number) value).longValue() : dateTimeToLong(((BytesRef) value).utf8ToString()); + return DataType.isDateTime(e.dataType()) ? ((Number) value).longValue() : dateTimeToLong(((BytesRef) value).utf8ToString()); } @Override public DataType dataType() { if (field.dataType().isNumeric()) { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } return field.dataType(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 797a4c31f0f6c..4f991af54ecff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -14,6 +14,16 @@ import java.util.List; +/** + * A {@code ScalarFunction} is a {@code Function} that takes values from some + * operation and converts each to another value. An example would be + * {@code ABS()}, which takes one value at a time, applies a function to the + * value (abs) and returns a new value. + *
<p> + * We have a guide for writing these in the javadoc for + * {@link org.elasticsearch.xpack.esql.expression.function.scalar}. + * </p>
+ */ public abstract class EsqlScalarFunction extends ScalarFunction implements EvaluatorMapper { protected EsqlScalarFunction(Source source) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index f67940e95ec09..f98f5c45acd16 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -36,7 +36,7 @@ import java.util.stream.Stream; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; public final class Case extends EsqlScalarFunction { record Condition(Expression condition, Expression value) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 3491cd00f9ae0..8062019b4c51c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -29,7 +28,7 @@ import java.util.function.Function; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; /** * Returns the maximum value of multiple columns. 
@@ -118,23 +117,23 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMax(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); - if (dataType == DataTypes.BOOLEAN) { + if (dataType == DataType.BOOLEAN) { return new GreatestBooleanEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.DOUBLE) { + if (dataType == DataType.DOUBLE) { return new GreatestDoubleEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.INTEGER) { + if (dataType == DataType.INTEGER) { return new GreatestIntEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.LONG) { + if (dataType == DataType.LONG) { return new GreatestLongEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.KEYWORD - || dataType == DataTypes.TEXT - || dataType == DataTypes.IP - || dataType == DataTypes.VERSION - || dataType == DataTypes.UNSUPPORTED) { + if (dataType == DataType.KEYWORD + || dataType == DataType.TEXT + || dataType == DataType.IP + || dataType == DataType.VERSION + || dataType == DataType.UNSUPPORTED) { return new GreatestBytesRefEvaluator.Factory(source(), factories); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 1238418264fbb..f983e0125a4db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -29,7 +28,7 @@ import java.util.function.Function; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; /** * Returns the minimum value of multiple columns. 
@@ -117,23 +116,23 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMin(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); - if (dataType == DataTypes.BOOLEAN) { + if (dataType == DataType.BOOLEAN) { return new LeastBooleanEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.DOUBLE) { + if (dataType == DataType.DOUBLE) { return new LeastDoubleEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.INTEGER) { + if (dataType == DataType.INTEGER) { return new LeastIntEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.LONG) { + if (dataType == DataType.LONG) { return new LeastLongEvaluator.Factory(source(), factories); } - if (dataType == DataTypes.KEYWORD - || dataType == DataTypes.TEXT - || dataType == DataTypes.IP - || dataType == DataTypes.VERSION - || dataType == DataTypes.UNSUPPORTED) { + if (dataType == DataType.KEYWORD + || dataType == DataType.TEXT + || dataType == DataType.IP + || dataType == DataType.VERSION + || dataType == DataType.UNSUPPORTED) { return new LeastBytesRefEvaluator.Factory(source(), factories); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 54d9fcb59f476..f1d0256a1f1c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -39,17 +38,16 @@ /** * Base class for functions that converts a field into a function-specific type. + *
<p> + * We have a guide for writing these in the javadoc for + * {@link org.elasticsearch.xpack.esql.expression.function.scalar}. + * </p>
*/ public abstract class AbstractConvertFunction extends UnaryScalarFunction { // the numeric types convert functions need to handle; the other numeric types are converted upstream to one of these - private static final List NUMERIC_TYPES = List.of( - DataTypes.INTEGER, - DataTypes.LONG, - DataTypes.UNSIGNED_LONG, - DataTypes.DOUBLE - ); - public static final List STRING_TYPES = DataTypes.types().stream().filter(EsqlDataTypes::isString).toList(); + private static final List NUMERIC_TYPES = List.of(DataType.INTEGER, DataType.LONG, DataType.UNSIGNED_LONG, DataType.DOUBLE); + public static final List STRING_TYPES = DataType.types().stream().filter(EsqlDataTypes::isString).toList(); protected AbstractConvertFunction(Source source, Expression field) { super(source, field); @@ -88,7 +86,7 @@ public static String supportedTypesNames(Set types) { STRING_TYPES.forEach(supportTypes::remove); } - supportTypes.forEach(t -> supportedTypesNames.add(t.name().toLowerCase(Locale.ROOT))); + supportTypes.forEach(t -> supportedTypesNames.add(t.nameUpper().toLowerCase(Locale.ROOT))); supportedTypesNames.sort(String::compareTo); return Strings.join(supportedTypesNames, " or "); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java index 6cbcd853be3c8..2ebc3d824a4e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java @@ -29,7 +29,7 @@ import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public class FromBase64 extends UnaryScalarFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java index b2b15a3b824b4..f246513375501 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java @@ -29,7 +29,7 @@ import java.util.function.Function; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public class ToBase64 extends UnaryScalarFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 127ad0d8c471c..5de31b7f114e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -20,13 +20,13 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static 
org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToBoolean; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBoolean; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 5c4208b9f0ce8..9a3362c52c0f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -20,9 +20,9 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; public class ToCartesianPoint extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java index df2c5186ab730..03e8c565f342b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -20,10 +20,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; public class ToCartesianShape extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index ed14c22092871..53c87427b9607 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -20,13 +20,13 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; public class ToDatetime extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index 590ab9de11d40..af163b6ff8788 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -21,10 +21,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; /** * Converts from radians diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 901257b63ec2b..b30162ef23349 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -22,14 +21,14 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToDouble; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; @@ -44,9 +43,9 @@ public class ToDouble extends AbstractConvertFunction { Map.entry(UNSIGNED_LONG, ToDoubleFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new), // CastIntToDoubleEvaluator would be a candidate, but not MV'd - Map.entry(DataTypes.COUNTER_DOUBLE, (field, source) -> field), - Map.entry(DataTypes.COUNTER_INTEGER, ToDoubleFromIntEvaluator.Factory::new), - Map.entry(DataTypes.COUNTER_LONG, ToDoubleFromLongEvaluator.Factory::new) + Map.entry(DataType.COUNTER_DOUBLE, (field, source) -> field), + Map.entry(DataType.COUNTER_INTEGER, ToDoubleFromIntEvaluator.Factory::new), + Map.entry(DataType.COUNTER_LONG, ToDoubleFromLongEvaluator.Factory::new) ); @FunctionInfo( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index 122211efbcf10..4692155fc9733 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -20,9 +20,9 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static 
org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; public class ToGeoPoint extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java index 9d7cc08ec6345..93acd578f86c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -20,10 +20,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; public class ToGeoShape extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index b7340f1853d3d..92467d98472c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -20,9 +20,9 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP; public class ToIP extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index ea86328a15362..aa926eee3a558 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -22,15 +21,15 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToInt; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToInt; @@ -45,7 +44,7 @@ public class ToInteger extends AbstractConvertFunction { Map.entry(DOUBLE, ToIntegerFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToIntegerFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new), - Map.entry(DataTypes.COUNTER_INTEGER, (fieldEval, source) -> fieldEval) + Map.entry(DataType.COUNTER_INTEGER, (fieldEval, source) -> fieldEval) ); @FunctionInfo( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 8d00b19f60657..182c1c923a96d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -22,15 +21,15 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeDoubleToLong; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToLong; @@ -45,8 +44,8 @@ public class ToLong extends AbstractConvertFunction { Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new), // CastIntToLongEvaluator would be a candidate, but not MV'd - Map.entry(DataTypes.COUNTER_LONG, (field, source) -> field), - Map.entry(DataTypes.COUNTER_INTEGER, ToLongFromIntEvaluator.Factory::new) + Map.entry(DataType.COUNTER_LONG, (field, source) -> field), + Map.entry(DataType.COUNTER_INTEGER, ToLongFromIntEvaluator.Factory::new) ); @FunctionInfo( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 830463eca38ed..2fd8ae1eb0bdd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -20,10 +20,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; /** * Converts from degrees diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 04318ba4fff64..141fa067e54e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -21,20 +21,20 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static 
org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.numericBooleanToString; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index b9206c592010d..ad6e935f7d6a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -21,14 +21,14 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.booleanToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.doubleToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 94c48bace1f34..1941dd00837b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -20,9 +20,9 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion; public class ToVersion extends AbstractConvertFunction { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java index ce949c9e6b390..74f0dae76c425 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.time.ZoneId; import java.time.ZoneOffset; @@ -30,7 +29,7 @@ protected BinaryDateTimeFunction(Source source, Expression argument, Expression @Override public DataType dataType() { - return DataTypes.DATETIME; + return DataType.DATETIME; } public Expression timestampField() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index 33db8430f20bd..42e20a9a4615e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -16,7 +16,6 @@ 
import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -219,7 +218,7 @@ public boolean foldable() { @Override public DataType dataType() { - return DataTypes.INTEGER; + return DataType.INTEGER; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 11759cdab1584..c28c5e417c152 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -123,7 +122,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return DataTypes.LONG; + return DataType.LONG; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 6690d495b0ecb..bcc5d7cb16050 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -62,7 +61,7 @@ Date format (optional). 
If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.S @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 3852d57772e7d..d68664afe8418 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -67,7 +66,7 @@ public DateParse( @Override public DataType dataType() { - return DataTypes.DATETIME; + return DataType.DATETIME; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 49f55e42931ac..ddd51d281105d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -82,7 +81,7 @@ protected TypeResolution resolveType() { } public DataType dataType() { - return DataTypes.DATETIME; + return DataType.DATETIME; } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java index 88e9d2b60b5b5..fe54cfd186fec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/Now.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; @@ -60,7 +59,7 @@ public boolean foldable() { @Override public DataType dataType() { - return DataTypes.DATETIME; + return DataType.DATETIME; } @Evaluator diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 12d2692460cb3..e2c2395446ed6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -105,7 +104,7 @@ static boolean process(BytesRef ip, BytesRef[] cidrs) { @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java index 9d429a620ed6e..d00d1b2c35fcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -35,7 +34,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isIPAndExact; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; /** * Truncates an IP value to a given prefix length. 
@@ -172,7 +171,7 @@ private static void makePrefix(BytesRef ip, BytesRef scratch, int fullBytes, int @Override public DataType dataType() { - return DataTypes.IP; + return DataType.IP; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 3898b342f007e..b821b8449a5a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -57,16 +57,16 @@ static int process(int fieldVal) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - if (dataType() == DataTypes.DOUBLE) { + if (dataType() == DataType.DOUBLE) { return new AbsDoubleEvaluator.Factory(source(), field); } - if (dataType() == DataTypes.UNSIGNED_LONG) { + if (dataType() == DataType.UNSIGNED_LONG) { return field; } - if (dataType() == DataTypes.LONG) { + if (dataType() == DataType.LONG) { return new AbsLongEvaluator.Factory(source(), field); } - if (dataType() == DataTypes.INTEGER) { + if (dataType() == DataType.INTEGER) { return new AbsIntEvaluator.Factory(source(), field); } throw EsqlIllegalArgumentException.illegalDataType(dataType()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index 1643d1b21ca5d..fee52567d161e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.util.function.Function; @@ -35,7 +34,7 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return doubleEvaluator(Cast.cast(source(), field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); + return doubleEvaluator(Cast.cast(source(), field().dataType(), DataType.DOUBLE, toEvaluator.apply(field()))); } @Override @@ -49,6 +48,6 @@ protected final TypeResolution resolveType() { @Override public final DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 50b1e6fc0b17e..a2af991a244c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -74,7 +73,7 @@ static double process(double y, double x) { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override @@ -97,8 +96,8 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var yEval = Cast.cast(source(), y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); - var xEval = Cast.cast(source(), x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); + var yEval = Cast.cast(source(), y.dataType(), DataType.DOUBLE, toEvaluator.apply(y)); + var xEval = Cast.cast(source(), x.dataType(), DataType.DOUBLE, toEvaluator.apply(x)); return new Atan2Evaluator.Factory(source(), yEval, xEval); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java index 5ffac6fb2b015..f4936f8ee37c6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; @@ -27,31 +26,31 @@ public static ExpressionEvaluator.Factory cast(Source source, DataType current, if (current == required) { return in; } - if (current == DataTypes.NULL || required == DataTypes.NULL) { + if (current == DataType.NULL || required == DataType.NULL) { return EvalOperator.CONSTANT_NULL_FACTORY; } - if (required == DataTypes.DOUBLE) { - if (current == DataTypes.LONG) { + if (required == DataType.DOUBLE) { + if (current == DataType.LONG) { return new CastLongToDoubleEvaluator.Factory(source, in); } - if (current == DataTypes.INTEGER) { + if (current == DataType.INTEGER) { return new CastIntToDoubleEvaluator.Factory(source, in); } - if (current == DataTypes.UNSIGNED_LONG) { + if (current == DataType.UNSIGNED_LONG) { return new CastUnsignedLongToDoubleEvaluator.Factory(source, in); } throw cantCast(current, required); } - if (required == DataTypes.UNSIGNED_LONG) { - if (current == DataTypes.LONG) { + if (required == DataType.UNSIGNED_LONG) { + if (current == DataType.LONG) { return new 
CastLongToUnsignedLongEvaluator.Factory(source, in); } - if (current == DataTypes.INTEGER) { + if (current == DataType.INTEGER) { return new CastIntToUnsignedLongEvaluator.Factory(source, in); } } - if (required == DataTypes.LONG) { - if (current == DataTypes.INTEGER) { + if (required == DataType.LONG) { + if (current == DataType.INTEGER) { return new CastIntToLongEvaluator.Factory(source, in); } throw cantCast(current, required); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java index 8eec5ff9b7aa1..43c0353de490a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -47,16 +46,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/DoubleConstantFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/DoubleConstantFunction.java index 6f9ca89796f0d..8c42fb22db0ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/DoubleConstantFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/DoubleConstantFunction.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; /** * Function that emits constants, like Euler's number. 
@@ -29,7 +28,7 @@ public final boolean foldable() { @Override public final DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index f60b0341dbb18..97007f10b31bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -115,14 +114,14 @@ protected NodeInfo info() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var valueEval = Cast.cast(source(), value.dataType(), DataTypes.DOUBLE, toEvaluator.apply(value)); + var valueEval = Cast.cast(source(), value.dataType(), DataType.DOUBLE, toEvaluator.apply(value)); if (base != null) { - var baseEval = Cast.cast(source(), base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); + var baseEval = Cast.cast(source(), base.dataType(), DataType.DOUBLE, toEvaluator.apply(base)); return new LogEvaluator.Factory(source(), baseEval, valueEval); } return new LogConstantEvaluator.Factory(source(), valueEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index ad08b34e3c53a..69b7efac9b7e0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -53,16 +52,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 0e60e25af1981..ab5282e665ebf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import 
org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -103,13 +102,13 @@ public Expression exponent() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var baseEval = Cast.cast(source(), base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); - var expEval = Cast.cast(source(), exponent.dataType(), DataTypes.DOUBLE, toEvaluator.apply(exponent)); + var baseEval = Cast.cast(source(), base.dataType(), DataType.DOUBLE, toEvaluator.apply(base)); + var expEval = Cast.cast(source(), exponent.dataType(), DataType.DOUBLE, toEvaluator.apply(exponent)); return new PowEvaluator.Factory(source(), baseEval, expEval); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index d1f6eba4081ba..0d7ca026c81ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -144,16 +143,16 @@ public DataType dataType() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { DataType fieldType = dataType(); - if (fieldType == DataTypes.DOUBLE) { + if (fieldType == DataType.DOUBLE) { return toEvaluator(toEvaluator, RoundDoubleNoDecimalsEvaluator.Factory::new, RoundDoubleEvaluator.Factory::new); } - if (fieldType == DataTypes.INTEGER) { + if (fieldType == DataType.INTEGER) { return toEvaluator(toEvaluator, EVALUATOR_IDENTITY, RoundIntEvaluator.Factory::new); } - if (fieldType == DataTypes.LONG) { + if (fieldType == DataType.LONG) { return toEvaluator(toEvaluator, EVALUATOR_IDENTITY, RoundLongEvaluator.Factory::new); } - if (fieldType == DataTypes.UNSIGNED_LONG) { + if (fieldType == DataType.UNSIGNED_LONG) { return toEvaluator(toEvaluator, EVALUATOR_IDENTITY, RoundUnsignedLongEvaluator.Factory::new); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); @@ -168,7 +167,7 @@ private ExpressionEvaluator.Factory toEvaluator( if (decimals == null) { return noDecimals.apply(source(), fieldEvaluator); } - var decimalsEvaluator = Cast.cast(source(), decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); + var decimalsEvaluator = Cast.cast(source(), decimals().dataType(), DataType.LONG, toEvaluator.apply(decimals())); return withDecimals.apply(source(), fieldEvaluator, decimalsEvaluator); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java index 84126e0d10797..7769e8c6c4a2e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -49,16 +48,16 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator( var field = toEvaluator.apply(field()); var fieldType = field().dataType(); - if (fieldType == DataTypes.DOUBLE) { + if (fieldType == DataType.DOUBLE) { return new SignumDoubleEvaluator.Factory(source(), field); } - if (fieldType == DataTypes.INTEGER) { + if (fieldType == DataType.INTEGER) { return new SignumIntEvaluator.Factory(source(), field); } - if (fieldType == DataTypes.LONG) { + if (fieldType == DataType.LONG) { return new SignumLongEvaluator.Factory(source(), field); } - if (fieldType == DataTypes.UNSIGNED_LONG) { + if (fieldType == DataType.UNSIGNED_LONG) { return new SignumUnsignedLongEvaluator.Factory(source(), field); } @@ -77,7 +76,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Evaluator(extraName = "Double") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 65e24bb5de13e..a27929b0b5d0a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -47,16 +46,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 2ceedd14d6fd8..5aa6dad7b2a5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -19,6 +19,10 @@ /** * Base class for functions that reduce multivalued fields into single valued fields. + *
<p> + * We have a guide for writing these in the javadoc for + * {@link org.elasticsearch.xpack.esql.expression.function.scalar}. + * </p>
*/ public abstract class AbstractMultivalueFunction extends UnaryScalarFunction { protected AbstractMultivalueFunction(Source source, Expression field) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java new file mode 100644 index 0000000000000..1f37c15ecfc43 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java @@ -0,0 +1,284 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +/** + * Appends values to a multi-value + */ +public class MvAppend extends EsqlScalarFunction implements EvaluatorMapper { + private final Expression field1, field2; + private DataType dataType; + + @FunctionInfo( + returnType = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" }, + description = "Concatenates values of two multi-value fields." 
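+ // Illustrative usage only (assumed ES|QL syntax, not asserted by this change):
+ // ROW a = [1, 2] | EVAL b = MV_APPEND(a, [3, 4]) evaluates b to [1, 2, 3, 4].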
+ ) + public MvAppend( + Source source, + @Param( + name = "field1", + type = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" } + ) Expression field1, + @Param( + name = "field2", + type = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" } + ) Expression field2 + ) { + super(source, Arrays.asList(field1, field2)); + this.field1 = field1; + this.field2 = field2; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isType(field1, EsqlDataTypes::isRepresentable, sourceText(), FIRST, "representable"); + if (resolution.unresolved()) { + return resolution; + } + dataType = field1.dataType(); + if (dataType == DataType.NULL) { + dataType = field2.dataType(); + return isType(field2, EsqlDataTypes::isRepresentable, sourceText(), SECOND, "representable"); + } + return isType(field2, t -> t == dataType, sourceText(), SECOND, dataType.typeName()); + } + + @Override + public boolean foldable() { + return field1.foldable() && field2.foldable(); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return switch (PlannerUtils.toElementType(dataType())) { + case BOOLEAN -> new MvAppendBooleanEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); + case BYTES_REF -> new MvAppendBytesRefEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); + case DOUBLE -> new MvAppendDoubleEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); + case INT -> new MvAppendIntEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); + case LONG -> new MvAppendLongEvaluator.Factory(source(), toEvaluator.apply(field1), toEvaluator.apply(field2)); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + default -> throw EsqlIllegalArgumentException.illegalDataType(dataType); + }; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new MvAppend(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MvAppend::new, field1, field2); + } + + @Override + public DataType dataType() { + if (dataType == null) { + resolveType(); + } + return dataType; + } + + @Override + public int hashCode() { + return Objects.hash(field1, field2); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + MvAppend other = (MvAppend) obj; + return Objects.equals(other.field1, field1) && Objects.equals(other.field2, field2); + } + + @Evaluator(extraName = "Int") + static void process(IntBlock.Builder builder, int position, IntBlock field1, IntBlock field2) { + int count1 = field1.getValueCount(position); + int count2 = field2.getValueCount(position); + if (count1 == 0 || count2 == 0) { + builder.appendNull(); + } else { + builder.beginPositionEntry(); + int first1 = field1.getFirstValueIndex(position); + int first2 = field2.getFirstValueIndex(position); + for (int i = 0; i < count1; i++) { + builder.appendInt(field1.getInt(first1 + i)); + } + for (int i = 0; i < count2; i++) { + builder.appendInt(field2.getInt(first2 + i)); + } + 
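+ // Both inputs have now been copied in source order into a single position, so close the multivalued entry.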
builder.endPositionEntry(); + } + + } + + @Evaluator(extraName = "Boolean") + static void process(BooleanBlock.Builder builder, int position, BooleanBlock field1, BooleanBlock field2) { + int count1 = field1.getValueCount(position); + int count2 = field2.getValueCount(position); + if (count1 == 0 || count2 == 0) { + builder.appendNull(); + } else { + int first1 = field1.getFirstValueIndex(position); + int first2 = field2.getFirstValueIndex(position); + builder.beginPositionEntry(); + for (int i = 0; i < count1; i++) { + builder.appendBoolean(field1.getBoolean(first1 + i)); + } + for (int i = 0; i < count2; i++) { + builder.appendBoolean(field2.getBoolean(first2 + i)); + } + builder.endPositionEntry(); + } + + } + + @Evaluator(extraName = "Long") + static void process(LongBlock.Builder builder, int position, LongBlock field1, LongBlock field2) { + int count1 = field1.getValueCount(position); + int count2 = field2.getValueCount(position); + if (count1 == 0 || count2 == 0) { + builder.appendNull(); + } else { + int first1 = field1.getFirstValueIndex(position); + int first2 = field2.getFirstValueIndex(position); + builder.beginPositionEntry(); + for (int i = 0; i < count1; i++) { + builder.appendLong(field1.getLong(first1 + i)); + } + for (int i = 0; i < count2; i++) { + builder.appendLong(field2.getLong(first2 + i)); + } + builder.endPositionEntry(); + } + } + + @Evaluator(extraName = "Double") + static void process(DoubleBlock.Builder builder, int position, DoubleBlock field1, DoubleBlock field2) { + int count1 = field1.getValueCount(position); + int count2 = field2.getValueCount(position); + if (count1 == 0 || count2 == 0) { + builder.appendNull(); + } else { + int first1 = field1.getFirstValueIndex(position); + int first2 = field2.getFirstValueIndex(position); + builder.beginPositionEntry(); + for (int i = 0; i < count1; i++) { + builder.appendDouble(field1.getDouble(first1 + i)); + } + for (int i = 0; i < count2; i++) { + builder.appendDouble(field2.getDouble(first2 + i)); + } + builder.endPositionEntry(); + } + + } + + @Evaluator(extraName = "BytesRef") + static void process(BytesRefBlock.Builder builder, int position, BytesRefBlock field1, BytesRefBlock field2) { + int count1 = field1.getValueCount(position); + int count2 = field2.getValueCount(position); + if (count1 == 0 || count2 == 0) { + builder.appendNull(); + } else { + int first1 = field1.getFirstValueIndex(position); + int first2 = field2.getFirstValueIndex(position); + builder.beginPositionEntry(); + BytesRef spare = new BytesRef(); + for (int i = 0; i < count1; i++) { + builder.appendBytesRef(field1.getBytesRef(first1 + i, spare)); + } + for (int i = 0; i < count2; i++) { + builder.appendBytesRef(field2.getBytesRef(first2 + i, spare)); + } + builder.endPositionEntry(); + } + } + + @Override + public Nullability nullable() { + return Nullability.TRUE; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index ba217c6fa0392..787bf3e5efd1c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -55,7 +54,7 @@ protected TypeResolution resolveFieldType() { @Override public DataType dataType() { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } @Override @@ -63,7 +62,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvAvgDoubleEvaluator.Factory(fieldEval); case INT -> new MvAvgIntEvaluator.Factory(fieldEval); - case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG + case LONG -> field().dataType() == DataType.UNSIGNED_LONG ? new MvAvgUnsignedLongEvaluator.Factory(fieldEval) : new MvAvgLongEvaluator.Factory(fieldEval); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index dd4b552999a00..3e37a739147cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -70,7 +69,7 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 26c3345240cd2..b2afef4f2235e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -66,7 +65,7 @@ protected TypeResolution resolveFieldType() { @Override public DataType dataType() { - return DataTypes.INTEGER; + return DataType.INTEGER; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 
543e37f43f671..71cf759b3dbe5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -26,9 +26,22 @@ * Removes duplicate values from a multivalued field. */ public class MvDedupe extends AbstractMultivalueFunction { - // @TODO: add cartesian_point, geo_point, unsigned_long + // @TODO: add unsigned_long @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + returnType = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" }, description = "Remove duplicate values from a multivalued field.", note = "`MV_DEDUPE` may, but won't always, sort the values in the column.", examples = @Example(file = "string", tag = "mv_dedupe") @@ -37,7 +50,20 @@ public MvDedupe( Source source, @Param( name = "field", - type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, + type = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" }, description = "Multivalue expression." ) Expression field ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index f936cd6fdfd1a..8d3177926f2e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -70,7 +70,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvMedianDoubleEvaluator.Factory(fieldEval); case INT -> new MvMedianIntEvaluator.Factory(fieldEval); - case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG + case LONG -> field().dataType() == DataType.UNSIGNED_LONG ? 
new MvMedianUnsignedLongEvaluator.Factory(fieldEval) : new MvMedianLongEvaluator.Factory(fieldEval); default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index d066745cc65b9..40e9f90df9dc6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -39,7 +39,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 3e1d7aabe7d39..744491b30f702 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -33,7 +33,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -57,7 +56,7 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Validatable { private final Expression field, order; - private static final Literal ASC = new Literal(Source.EMPTY, "ASC", DataTypes.KEYWORD); + private static final Literal ASC = new Literal(Source.EMPTY, "ASC", DataType.KEYWORD); @FunctionInfo( returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index c3cee1a227325..e14bc401a058a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.Example; import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -57,7 +57,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvSumDoubleEvaluator.Factory(fieldEval); case INT -> new MvSumIntEvaluator.Factory(source(), fieldEval); - case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG + case LONG -> field().dataType() == DataType.UNSIGNED_LONG ? new MvSumUnsignedLongEvaluator.Factory(source(), fieldEval) : new MvSumLongEvaluator.Factory(source(), fieldEval); case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java index 8ca09569015a4..4f42858cbedba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java @@ -14,11 +14,11 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -39,7 +39,7 @@ */ public class MvZip extends EsqlScalarFunction implements OptionalArgument, EvaluatorMapper { private final Expression mvLeft, mvRight, delim; - private static final Literal COMMA = new Literal(Source.EMPTY, ",", DataTypes.TEXT); + private static final Literal COMMA = new Literal(Source.EMPTY, ",", DataType.TEXT); @FunctionInfo( returnType = { "keyword" }, @@ -94,6 +94,12 @@ public boolean foldable() { return mvLeft.foldable() && mvRight.foldable() && (delim == null || delim.foldable()); } + @Override + public Nullability nullable() { + // Nullability.TRUE means if *any* parameter is null we return null. We're only null if the first two are null. 
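+ // For example, when mvRight is null the evaluator still emits mvLeft's values, so the result is non-null.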
+ return Nullability.FALSE; + } + @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator( Function toEvaluator @@ -113,7 +119,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } private static void buildOneSide(BytesRefBlock.Builder builder, int start, int end, BytesRefBlock field, BytesRef fieldScratch) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 5c823f47f794f..ff7cd83eedbe2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -34,7 +34,7 @@ import java.util.stream.IntStream; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; /** * Function returning the first non-null value. @@ -54,7 +54,8 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", examples = { @Example(file = "null", tag = "coalesce") } ) @@ -73,7 +74,8 @@ public Coalesce( "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Expression to evaluate." ) Expression first, @Param( @@ -89,7 +91,8 @@ public Coalesce( "ip", "keyword", "long", - "text" }, + "text", + "version" }, description = "Other expression to evaluate.", optional = true ) List rest diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 729262d4d5870..63fdb9b5bc774 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -36,10 +36,10 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2Ds; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.makeGeometryFromLiteral; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index f66533ba38162..26d48831fdd81 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -34,10 +34,10 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 8abc678415e71..c0794f59dcf81 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -34,10 +34,10 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index d2527c5a8bcd2..064df31e35cb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -23,7 +23,6 @@ import 
org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -40,9 +39,9 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isNull; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; @@ -65,7 +64,7 @@ protected SpatialRelatesFunction(Source source, Expression left, Expression righ @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index 8ca7a40639e1f..6568fd42d44c7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -35,10 +35,10 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index b2ece9388e35c..2af1a353a39dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java 
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatialPoint; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index b52d6929ec3a7..266c5f986c526 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatialPoint; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 670c96d880f21..d01edbe7024e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -54,7 +53,7 @@ public Concat( @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 11a33d9a038e1..767563ed4112a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -58,7 +57,7 @@ public EndsWith( @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return 
DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 422ae9532aab7..384874e173658 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -30,7 +29,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; /** * {code left(foo, len)} is an alias to {code substring(foo, 0, len)} @@ -95,7 +94,7 @@ protected NodeInfo info() { @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index cad8bf2770529..e2beda9612b04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -47,7 +46,7 @@ public Length( @Override public DataType dataType() { - return DataTypes.INTEGER; + return DataType.INTEGER; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index 067a16cc3fc66..1669a64ec83d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -64,7 +63,7 @@ public Locate( @Override public DataType dataType() { - return DataTypes.INTEGER; + return DataType.INTEGER; } @Override @@ -82,7 +81,7 @@ protected TypeResolution resolveType() { return resolution; } - return start == null ? TypeResolution.TYPE_RESOLVED : isType(start, dt -> dt == DataTypes.INTEGER, sourceText(), THIRD, "integer"); + return start == null ? TypeResolution.TYPE_RESOLVED : isType(start, dt -> dt == DataType.INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java new file mode 100644 index 0000000000000..e8ad0a83829fe --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.common.unit.ByteSizeUnit.MB; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +public class Repeat extends EsqlScalarFunction implements OptionalArgument { + + static final long MAX_REPEATED_LENGTH = MB.toBytes(1); + + private final Expression str; + private final Expression number; + + @FunctionInfo( + returnType = "keyword", + description = "Returns a string constructed by concatenating `string` with itself the specified `number` of times.", + examples = @Example(file = "string", tag = "repeat") + ) + public Repeat( + Source source, + @Param(name = "string", type = { "keyword", "text" }, description = "String expression.") Expression str, + @Param(name = "number", type = { "integer" }, description = "Number of times to repeat.") Expression number + ) { + super(source, Arrays.asList(str, number)); + this.str = str; + this.number =
number; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isString(str, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + + return isType(number, dt -> dt == DataType.INTEGER, sourceText(), SECOND, "integer"); + } + + @Override + public boolean foldable() { + return str.foldable() && number.foldable(); + } + + @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class }) + static BytesRef processConstantNumber( + @Fixed(includeInToString = false, build = true) BreakingBytesRefBuilder scratch, + BytesRef str, + @Fixed int number + ) { + return processInner(scratch, str, number); + } + + @Evaluator(warnExceptions = { IllegalArgumentException.class }) + static BytesRef process(@Fixed(includeInToString = false, build = true) BreakingBytesRefBuilder scratch, BytesRef str, int number) { + if (number < 0) { + throw new IllegalArgumentException("Number parameter cannot be negative, found [" + number + "]"); + } + return processInner(scratch, str, number); + } + + static BytesRef processInner(BreakingBytesRefBuilder scratch, BytesRef str, int number) { + int repeatedLen = str.length * number; + if (repeatedLen > MAX_REPEATED_LENGTH) { + throw new IllegalArgumentException( + "Creating repeated strings with more than [" + MAX_REPEATED_LENGTH + "] bytes is not supported" + ); + } + scratch.grow(repeatedLen); + scratch.clear(); + for (int i = 0; i < number; ++i) { + scratch.append(str); + } + return scratch.bytesRefView(); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Repeat(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Repeat::new, str, number); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); + + if (number.foldable()) { + int num = (int) number.fold(); + if (num < 0) { + throw new IllegalArgumentException("Number parameter cannot be negative, found [" + number + "]"); + } + return new RepeatConstantEvaluator.Factory( + source(), + context -> new BreakingBytesRefBuilder(context.breaker(), "repeat"), + strExpr, + num + ); + } + + ExpressionEvaluator.Factory numberExpr = toEvaluator.apply(number); + return new RepeatEvaluator.Factory( + source(), + context -> new BreakingBytesRefBuilder(context.breaker(), "repeat"), + strExpr, + numberExpr + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index c0871974bcc1a..7318c50a2e54d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -63,7 +62,7 @@ public Replace( @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override @@ -125,7 +124,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index edb052821bef3..99c109d72ae63 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -57,7 +56,7 @@ public Split( @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 13a9fb42d2db1..f1d67f317a60f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -58,7 +57,7 @@ public StartsWith( @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 4f3d0e15eef72..94b9f06b63b5d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import 
org.elasticsearch.xpack.esql.expression.function.Param; @@ -31,7 +30,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; public class Substring extends EsqlScalarFunction implements OptionalArgument { @@ -72,7 +71,7 @@ public Substring( @Override public DataType dataType() { - return DataTypes.KEYWORD; + return DataType.KEYWORD; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 38f44988d6ac6..04a7b8a6067bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.time.Duration; @@ -22,15 +21,15 @@ import java.util.Collection; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isNull; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; +import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isDateTimeOrTemporal; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; -abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperation { +public abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperation { /** Arithmetic (quad) function. 
*/ interface DatetimeArithmeticEvaluator { ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory expressionEvaluator, TemporalAmount temporalAmount); @@ -57,7 +56,7 @@ interface DatetimeArithmeticEvaluator { protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { return TypeResolutions.isType( e, - t -> t.isNumeric() || EsqlDataTypes.isDateTimeOrTemporal(t) || DataTypes.isNull(t), + t -> t.isNumeric() || EsqlDataTypes.isDateTimeOrTemporal(t) || DataType.isNull(t), sourceText(), paramOrdinal, "datetime", @@ -91,7 +90,7 @@ protected TypeResolution checkCompatibility() { } /** - * Override this to allow processing literals of type {@link DataTypes#DATE_PERIOD} when folding constants. + * Override this to allow processing literals of type {@link DataType#DATE_PERIOD} when folding constants. * Used in {@link DateTimeArithmeticOperation#fold()}. * @param left the left period * @param right the right period @@ -100,7 +99,7 @@ protected TypeResolution checkCompatibility() { abstract Period fold(Period left, Period right); /** - * Override this to allow processing literals of type {@link DataTypes#TIME_DURATION} when folding constants. + * Override this to allow processing literals of type {@link DataType#TIME_DURATION} when folding constants. * Used in {@link DateTimeArithmeticOperation#fold()}. * @param left the left duration * @param right the right duration diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index f43b217d7ebef..6d63551abd314 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryArithmeticOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; @@ -24,10 +23,10 @@ import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; public abstract class EsqlArithmeticOperation extends ArithmeticOperation implements EvaluatorMapper { @@ -131,8 +130,8 @@ protected TypeResolution checkCompatibility() { // This checks that unsigned longs should only be compatible with other unsigned longs DataType leftType = left().dataType(); 
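
The guard that follows appears again, essentially verbatim, in `EsqlBinaryComparison#checkCompatibility`, and in stricter form (without the NULL allowance) in `In#areCompatible` further down. Restated as a reduced sketch over a stand-in type enum, kept in the diff's `false == (...)` style; the enum here is illustrative, not the real `DataType`:

```java
// Stand-in for the relevant DataType constants; illustrative only.
enum T { INTEGER, LONG, UNSIGNED_LONG, DOUBLE, NULL }

public class UnsignedLongRule {
    // Mirrors the guard in checkCompatibility(): unsigned_long is compatible
    // only with unsigned_long (or the NULL literal type); no implicit widening.
    static boolean incompatible(T left, T right) {
        return (right == T.UNSIGNED_LONG && false == (left == T.UNSIGNED_LONG || left == T.NULL))
            || (left == T.UNSIGNED_LONG && false == (right == T.UNSIGNED_LONG || right == T.NULL));
    }

    public static void main(String[] args) {
        System.out.println(incompatible(T.UNSIGNED_LONG, T.LONG));          // true: type error
        System.out.println(incompatible(T.UNSIGNED_LONG, T.UNSIGNED_LONG)); // false: allowed
        System.out.println(incompatible(T.NULL, T.UNSIGNED_LONG));          // false: allowed
        System.out.println(incompatible(T.LONG, T.INTEGER));                // false: numeric widening rules apply instead
    }
}
```
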
DataType rightType = right().dataType(); - if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataTypes.NULL))) - || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataTypes.NULL)))) { + if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataType.NULL))) + || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataType.NULL)))) { return new TypeResolution(formatIncompatibleTypesMessage(symbol(), leftType, rightType)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 0847c740805d9..50fb5c58e2005 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -26,8 +25,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; public class Neg extends UnaryScalarFunction { @@ -47,13 +46,13 @@ public ExpressionEvaluator.Factory toEvaluator(Function dt != DataTypes.UNSIGNED_LONG && (dt.isNumeric() || isTemporalAmount(dt)), + dt -> dt != DataType.UNSIGNED_LONG && (dt.isNumeric() || isTemporalAmount(dt)), sourceText(), DEFAULT, "numeric", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java index 7c1ece4efaa12..b2ae8cff6a697 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryComparisonInversible; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.time.DateTimeException; @@ -47,7 +47,7 @@ public Sub(Source source, Expression left, Expression right) { protected TypeResolution resolveType() { TypeResolution resolution = super.resolveType(); // As opposed to general date time 
arithmetics, we cannot subtract a datetime from something else. - if (resolution.resolved() && EsqlDataTypes.isDateTimeOrTemporal(dataType()) && DataTypes.isDateTime(right().dataType())) { + if (resolution.resolved() && EsqlDataTypes.isDateTimeOrTemporal(dataType()) && DataType.isDateTime(right().dataType())) { return new TypeResolution( format( null, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/Equals.java index 44c7cb39b9a1a..e73cf91cd52a8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/Equals.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -21,20 +20,20 @@ public class Equals extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.BOOLEAN, EqualsBoolsEvaluator.Factory::new), - Map.entry(DataTypes.INTEGER, EqualsIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, EqualsDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, EqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, EqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, EqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.GEO_POINT, EqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.CARTESIAN_POINT, EqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.GEO_SHAPE, EqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.CARTESIAN_SHAPE, EqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, EqualsKeywordsEvaluator.Factory::new) + Map.entry(DataType.BOOLEAN, EqualsBoolsEvaluator.Factory::new), + Map.entry(DataType.INTEGER, EqualsIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, EqualsDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, EqualsLongsEvaluator.Factory::new), + Map.entry(DataType.GEO_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.CARTESIAN_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.GEO_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.CARTESIAN_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataType.VERSION, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, EqualsKeywordsEvaluator.Factory::new) ); public Equals(Source source, Expression left, Expression right) { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index a1a1bf6b2c19a..41dafecbff76e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; @@ -30,7 +29,7 @@ import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { @@ -182,16 +181,16 @@ protected TypeResolution checkCompatibility() { DataType rightType = right().dataType(); // Unsigned long is only interoperable with other unsigned longs - if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataTypes.NULL))) - || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataTypes.NULL)))) { + if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataType.NULL))) + || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataType.NULL)))) { return new TypeResolution(formatIncompatibleTypesMessage()); } if ((leftType.isNumeric() && rightType.isNumeric()) - || (DataTypes.isString(leftType) && DataTypes.isString(rightType)) + || (DataType.isString(leftType) && DataType.isString(rightType)) || leftType.equals(rightType) - || DataTypes.isNull(leftType) - || DataTypes.isNull(rightType)) { + || DataType.isNull(leftType) + || DataType.isNull(rightType)) { return TypeResolution.TYPE_RESOLVED; } return new TypeResolution(formatIncompatibleTypesMessage()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThan.java index 655641be88f84..da639b328b7c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThan.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -21,15 
+20,15 @@ public class GreaterThan extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, GreaterThanIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, GreaterThanDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, GreaterThanKeywordsEvaluator.Factory::new) + Map.entry(DataType.INTEGER, GreaterThanIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, GreaterThanDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.VERSION, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, GreaterThanKeywordsEvaluator.Factory::new) ); public GreaterThan(Source source, Expression left, Expression right) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqual.java index 48ed5f513bbdc..0644cd5df9038 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqual.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -21,15 +20,15 @@ public class GreaterThanOrEqual extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, GreaterThanOrEqualIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, GreaterThanOrEqualDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, GreaterThanOrEqualKeywordsEvaluator.Factory::new) + Map.entry(DataType.INTEGER, GreaterThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, GreaterThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, 
GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.VERSION, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, GreaterThanOrEqualKeywordsEvaluator.Factory::new) ); public GreaterThanOrEqual(Source source, Expression left, Expression right) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index c46b3588df3df..17fca1e1cff88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -66,7 +65,7 @@ public Boolean fold() { @Override protected boolean areCompatible(DataType left, DataType right) { - if (left == DataTypes.UNSIGNED_LONG || right == DataTypes.UNSIGNED_LONG) { + if (left == DataType.UNSIGNED_LONG || right == DataType.UNSIGNED_LONG) { // automatic numerical conversions not applicable for UNSIGNED_LONG, see Verifier#validateUnsignedLongOperator(). 
return left == right; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java index 6275db7d78524..9302f6e9c5a77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; public abstract class InsensitiveBinaryComparison extends BinaryScalarFunction { @@ -20,7 +19,7 @@ protected InsensitiveBinaryComparison(Source source, Expression left, Expression @Override public DataType dataType() { - return DataTypes.BOOLEAN; + return DataType.BOOLEAN; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java index f73960fc73e88..0afc9e0280f4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsMapper.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.planner.Layout; @@ -36,7 +35,7 @@ public final ExpressionEvaluator.Factory map(InsensitiveEquals bc, Layout layout var leftEval = toEvaluator(bc.left(), layout); var rightEval = toEvaluator(bc.right(), layout); - if (leftType == DataTypes.KEYWORD || leftType == DataTypes.TEXT) { + if (leftType == DataType.KEYWORD || leftType == DataType.TEXT) { if (bc.right().foldable() && EsqlDataTypes.isString(rightType)) { BytesRef rightVal = BytesRefs.toBytesRef(bc.right().fold()); Automaton automaton = InsensitiveEquals.automaton(rightVal); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java index c1e8d167c9d2a..8c6824a9827d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -22,15 +21,15 @@ public class LessThan extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, LessThanIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, LessThanDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, LessThanKeywordsEvaluator.Factory::new) + Map.entry(DataType.INTEGER, LessThanIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, LessThanDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, LessThanLongsEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.VERSION, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, LessThanKeywordsEvaluator.Factory::new) ); public LessThan(Source source, Expression left, Expression right) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqual.java index a2daa8b48ea63..0a18c44ea2891 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqual.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -21,15 +20,15 @@ public class LessThanOrEqual extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, LessThanOrEqualIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, LessThanOrEqualDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, LessThanOrEqualKeywordsEvaluator.Factory::new) + Map.entry(DataType.INTEGER, LessThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, LessThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, 
LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.VERSION, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, LessThanOrEqualKeywordsEvaluator.Factory::new) ); public LessThanOrEqual(Source source, Expression left, Expression right) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEquals.java index 788c2396fa626..0a60a6da818c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEquals.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import java.time.ZoneId; @@ -21,20 +20,20 @@ public class NotEquals extends EsqlBinaryComparison implements Negatable { private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.BOOLEAN, NotEqualsBoolsEvaluator.Factory::new), - Map.entry(DataTypes.INTEGER, NotEqualsIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, NotEqualsDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, NotEqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, NotEqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, NotEqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.GEO_POINT, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.CARTESIAN_POINT, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.GEO_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.CARTESIAN_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, NotEqualsKeywordsEvaluator.Factory::new) + Map.entry(DataType.BOOLEAN, NotEqualsBoolsEvaluator.Factory::new), + Map.entry(DataType.INTEGER, NotEqualsIntsEvaluator.Factory::new), + Map.entry(DataType.DOUBLE, NotEqualsDoublesEvaluator.Factory::new), + Map.entry(DataType.LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataType.UNSIGNED_LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataType.DATETIME, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataType.GEO_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.CARTESIAN_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.GEO_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.CARTESIAN_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataType.KEYWORD, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataType.TEXT, NotEqualsKeywordsEvaluator.Factory::new), + 
Map.entry(DataType.VERSION, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataType.IP, NotEqualsKeywordsEvaluator.Factory::new) ); public NotEquals(Source source, Expression left, Expression right) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index f605f898366e1..20d9907c61ba2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; @@ -118,6 +117,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppend; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvConcat; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -147,6 +147,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; @@ -178,10 +179,13 @@ import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -195,7 +199,9 @@ import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import 
org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -212,8 +218,8 @@ import java.util.function.Function; import static java.util.Map.entry; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.Entry.of; @@ -276,6 +282,8 @@ public static List namedTypeEntries() { of(PhysicalPlan.class, FragmentExec.class, PlanNamedTypes::writeFragmentExec, PlanNamedTypes::readFragmentExec), of(PhysicalPlan.class, GrokExec.class, PlanNamedTypes::writeGrokExec, PlanNamedTypes::readGrokExec), of(PhysicalPlan.class, LimitExec.class, PlanNamedTypes::writeLimitExec, PlanNamedTypes::readLimitExec), + of(PhysicalPlan.class, LocalSourceExec.class, (out, v) -> v.writeTo(out), LocalSourceExec::new), + of(PhysicalPlan.class, HashJoinExec.class, (out, v) -> v.writeTo(out), HashJoinExec::new), of(PhysicalPlan.class, MvExpandExec.class, PlanNamedTypes::writeMvExpandExec, PlanNamedTypes::readMvExpandExec), of(PhysicalPlan.class, OrderExec.class, PlanNamedTypes::writeOrderExec, PlanNamedTypes::readOrderExec), of(PhysicalPlan.class, ProjectExec.class, PlanNamedTypes::writeProjectExec, PlanNamedTypes::readProjectExec), @@ -291,18 +299,21 @@ public static List namedTypeEntries() { of(LogicalPlan.class, EsqlProject.class, PlanNamedTypes::writeEsqlProject, PlanNamedTypes::readEsqlProject), of(LogicalPlan.class, Filter.class, PlanNamedTypes::writeFilter, PlanNamedTypes::readFilter), of(LogicalPlan.class, Grok.class, PlanNamedTypes::writeGrok, PlanNamedTypes::readGrok), + of(LogicalPlan.class, Join.class, (out, p) -> p.writeTo(out), Join::new), of(LogicalPlan.class, Limit.class, PlanNamedTypes::writeLimit, PlanNamedTypes::readLimit), + of(LogicalPlan.class, LocalRelation.class, (out, p) -> p.writeTo(out), LocalRelation::new), + of(LogicalPlan.class, Lookup.class, (out, p) -> p.writeTo(out), Lookup::new), of(LogicalPlan.class, MvExpand.class, PlanNamedTypes::writeMvExpand, PlanNamedTypes::readMvExpand), of(LogicalPlan.class, OrderBy.class, PlanNamedTypes::writeOrderBy, PlanNamedTypes::readOrderBy), of(LogicalPlan.class, Project.class, PlanNamedTypes::writeProject, PlanNamedTypes::readProject), of(LogicalPlan.class, TopN.class, PlanNamedTypes::writeTopN, PlanNamedTypes::readTopN), // Attributes - of(NamedExpression.class, FieldAttribute.class, (o, a) -> a.writeTo(o), FieldAttribute::new), - of(NamedExpression.class, ReferenceAttribute.class, (o, a) -> a.writeTo(o), ReferenceAttribute::new), - of(NamedExpression.class, MetadataAttribute.class, (o, a) -> a.writeTo(o), MetadataAttribute::new), - of(NamedExpression.class, UnsupportedAttribute.class, (o, a) -> a.writeTo(o), UnsupportedAttribute::new), + of(Expression.class, FieldAttribute.class, (o, a) -> a.writeTo(o), FieldAttribute::new), + of(Expression.class, ReferenceAttribute.class, (o, a) -> a.writeTo(o), ReferenceAttribute::new), + of(Expression.class, MetadataAttribute.class, (o, a) -> a.writeTo(o), MetadataAttribute::new), + of(Expression.class, 
UnsupportedAttribute.class, (o, a) -> a.writeTo(o), UnsupportedAttribute::new), // NamedExpressions - of(NamedExpression.class, Alias.class, PlanNamedTypes::writeAlias, PlanNamedTypes::readAlias), + of(Expression.class, Alias.class, (o, a) -> a.writeTo(o), Alias::new), // BinaryComparison of(EsqlBinaryComparison.class, Equals.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison), of(EsqlBinaryComparison.class, NotEquals.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison), @@ -398,6 +409,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), of(ScalarFunction.class, Locate.class, PlanNamedTypes::writeLocate, PlanNamedTypes::readLocate), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), + of(ScalarFunction.class, Repeat.class, PlanNamedTypes::writeRepeat, PlanNamedTypes::readRepeat), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), of(ScalarFunction.class, Tau.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar), @@ -425,6 +437,7 @@ public static List namedTypeEntries() { of(AggregateFunction.class, Sum.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Values.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), // Multivalue functions + of(ScalarFunction.class, MvAppend.class, PlanNamedTypes::writeMvAppend, PlanNamedTypes::readMvAppend), of(ScalarFunction.class, MvAvg.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvCount.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvConcat.class, PlanNamedTypes::writeMvConcat, PlanNamedTypes::readMvConcat), @@ -450,7 +463,7 @@ static AggregateExec readAggregateExec(PlanStreamInput in) throws IOException { Source.readFrom(in), in.readPhysicalPlanNode(), in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)), - readNamedExpressions(in), + in.readNamedWriteableCollectionAsList(NamedExpression.class), in.readEnum(AggregateExec.Mode.class), in.readOptionalVInt() ); @@ -460,7 +473,7 @@ static void writeAggregateExec(PlanStreamOutput out, AggregateExec aggregateExec Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(aggregateExec.child()); out.writeCollection(aggregateExec.groupings(), writerFromPlanWriter(PlanStreamOutput::writeExpression)); - writeNamedExpressions(out, aggregateExec.aggregates()); + out.writeNamedWriteableCollection(aggregateExec.aggregates()); out.writeEnum(aggregateExec.getMode()); out.writeOptionalVInt(aggregateExec.estimatedRowSize()); } @@ -543,19 +556,19 @@ static void writeIndexMode(StreamOutput out, IndexMode indexMode) throws IOExcep } static EvalExec readEvalExec(PlanStreamInput in) throws IOException { - return new EvalExec(Source.readFrom(in), in.readPhysicalPlanNode(), readAliases(in)); + return new EvalExec(Source.readFrom(in), in.readPhysicalPlanNode(), in.readCollectionAsList(Alias::new)); } static void writeEvalExec(PlanStreamOutput out, EvalExec evalExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(evalExec.child()); - writeAliases(out, evalExec.fields()); + out.writeCollection(evalExec.fields()); } static EnrichExec readEnrichExec(PlanStreamInput in) 
throws IOException { final Source source = Source.readFrom(in); final PhysicalPlan child = in.readPhysicalPlanNode(); - final NamedExpression matchField = in.readNamedExpression(); + final NamedExpression matchField = in.readNamedWriteable(NamedExpression.class); final String policyName = in.readString(); final String matchType = (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_EXTENDED_ENRICH_TYPES)) ? in.readString() @@ -583,14 +596,14 @@ static EnrichExec readEnrichExec(PlanStreamInput in) throws IOException { policyName, policyMatchField, concreteIndices, - readNamedExpressions(in) + in.readNamedWriteableCollectionAsList(NamedExpression.class) ); } static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(enrich.child()); - out.writeNamedExpression(enrich.matchField()); + out.writeNamedWriteable(enrich.matchField()); out.writeString(enrich.policyName()); if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_EXTENDED_ENRICH_TYPES)) { out.writeString(enrich.matchType()); @@ -607,7 +620,7 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx throw new IllegalStateException("expected a single concrete enrich index; got " + enrich.concreteIndices()); } } - writeNamedExpressions(out, enrich.enrichFields()); + out.writeNamedWriteableCollection(enrich.enrichFields()); } static ExchangeExec readExchangeExec(PlanStreamInput in) throws IOException { @@ -724,7 +737,7 @@ static MvExpandExec readMvExpandExec(PlanStreamInput in) throws IOException { return new MvExpandExec( Source.readFrom(in), in.readPhysicalPlanNode(), - in.readNamedExpression(), + in.readNamedWriteable(NamedExpression.class), in.readNamedWriteable(Attribute.class) ); } @@ -732,7 +745,7 @@ static MvExpandExec readMvExpandExec(PlanStreamInput in) throws IOException { static void writeMvExpandExec(PlanStreamOutput out, MvExpandExec mvExpandExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(mvExpandExec.child()); - out.writeNamedExpression(mvExpandExec.target()); + out.writeNamedWriteable(mvExpandExec.target()); out.writeNamedWriteable(mvExpandExec.expanded()); } @@ -751,23 +764,27 @@ static void writeOrderExec(PlanStreamOutput out, OrderExec orderExec) throws IOE } static ProjectExec readProjectExec(PlanStreamInput in) throws IOException { - return new ProjectExec(Source.readFrom(in), in.readPhysicalPlanNode(), readNamedExpressions(in)); + return new ProjectExec( + Source.readFrom(in), + in.readPhysicalPlanNode(), + in.readNamedWriteableCollectionAsList(NamedExpression.class) + ); } static void writeProjectExec(PlanStreamOutput out, ProjectExec projectExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(projectExec.child()); - writeNamedExpressions(out, projectExec.projections()); + out.writeNamedWriteableCollection(projectExec.projections()); } static RowExec readRowExec(PlanStreamInput in) throws IOException { - return new RowExec(Source.readFrom(in), readAliases(in)); + return new RowExec(Source.readFrom(in), in.readCollectionAsList(Alias::new)); } static void writeRowExec(PlanStreamOutput out, RowExec rowExec) throws IOException { assert rowExec.children().size() == 0; Source.EMPTY.writeTo(out); - writeAliases(out, rowExec.fields()); + out.writeCollection(rowExec.fields()); } @SuppressWarnings("unchecked") @@ -809,7 +826,7 @@ static Aggregate readAggregate(PlanStreamInput in) throws IOException { Source.readFrom(in), in.readLogicalPlanNode(), 
in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)), - readNamedExpressions(in) + in.readNamedWriteableCollectionAsList(NamedExpression.class) ); } @@ -817,7 +834,7 @@ static void writeAggregate(PlanStreamOutput out, Aggregate aggregate) throws IOE Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(aggregate.child()); out.writeCollection(aggregate.groupings(), writerFromPlanWriter(PlanStreamOutput::writeExpression)); - writeNamedExpressions(out, aggregate.aggregates()); + out.writeNamedWriteableCollection(aggregate.aggregates()); } static Dissect readDissect(PlanStreamInput in) throws IOException { @@ -886,13 +903,13 @@ private static void writeEsSourceOptions(PlanStreamOutput out) throws IOExceptio } static Eval readEval(PlanStreamInput in) throws IOException { - return new Eval(Source.readFrom(in), in.readLogicalPlanNode(), readAliases(in)); + return new Eval(Source.readFrom(in), in.readLogicalPlanNode(), in.readCollectionAsList(Alias::new)); } static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(eval.child()); - writeAliases(out, eval.fields()); + out.writeCollection(eval.fields()); } static Enrich readEnrich(PlanStreamInput in) throws IOException { @@ -903,7 +920,7 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { final Source source = Source.readFrom(in); final LogicalPlan child = in.readLogicalPlanNode(); final Expression policyName = in.readExpression(); - final NamedExpression matchField = in.readNamedExpression(); + final NamedExpression matchField = in.readNamedWriteable(NamedExpression.class); if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readString(); // discard the old policy name } @@ -918,7 +935,16 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { } concreteIndices = Map.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); } - return new Enrich(source, child, mode, policyName, matchField, policy, concreteIndices, readNamedExpressions(in)); + return new Enrich( + source, + child, + mode, + policyName, + matchField, + policy, + concreteIndices, + in.readNamedWriteableCollectionAsList(NamedExpression.class) + ); } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { @@ -929,7 +955,7 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(enrich.child()); out.writeExpression(enrich.policyName()); - out.writeNamedExpression(enrich.matchField()); + out.writeNamedWriteable(enrich.matchField()); if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name } @@ -946,17 +972,17 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException throw new IllegalStateException("expected a single enrich index; got " + concreteIndices); } } - writeNamedExpressions(out, enrich.enrichFields()); + out.writeNamedWriteableCollection(enrich.enrichFields()); } static EsqlProject readEsqlProject(PlanStreamInput in) throws IOException { - return new EsqlProject(Source.readFrom(in), in.readLogicalPlanNode(), readNamedExpressions(in)); + return new EsqlProject(Source.readFrom(in), in.readLogicalPlanNode(), in.readNamedWriteableCollectionAsList(NamedExpression.class)); } static void writeEsqlProject(PlanStreamOutput out, EsqlProject project) throws IOException { Source.EMPTY.writeTo(out); 
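// A minimal sketch of the serialization convention adopted across these readers/writers
// (hypothetical out/in stream pair; assumes the concrete NamedExpression classes are
// registered as named writeables): instead of the bespoke PlanNamedTypes helpers removed
// further below, collections of NamedExpression now round-trip through the stock
// NamedWriteable machinery.
//
//   out.writeNamedWriteableCollection(project.projections());
//   List<NamedExpression> projections = in.readNamedWriteableCollectionAsList(NamedExpression.class);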
out.writeLogicalPlanNode(project.child()); - writeNamedExpressions(out, project.projections()); + out.writeNamedWriteableCollection(project.projections()); } static Filter readFilter(PlanStreamInput in) throws IOException { @@ -1002,7 +1028,7 @@ static MvExpand readMvExpand(PlanStreamInput in) throws IOException { return new MvExpand( Source.readFrom(in), in.readLogicalPlanNode(), - in.readNamedExpression(), + in.readNamedWriteable(NamedExpression.class), in.readNamedWriteable(Attribute.class) ); } @@ -1010,7 +1036,7 @@ static MvExpand readMvExpand(PlanStreamInput in) throws IOException { static void writeMvExpand(PlanStreamOutput out, MvExpand mvExpand) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(mvExpand.child()); - out.writeNamedExpression(mvExpand.target()); + out.writeNamedWriteable(mvExpand.target()); out.writeNamedWriteable(mvExpand.expanded()); } @@ -1029,13 +1055,13 @@ static void writeOrderBy(PlanStreamOutput out, OrderBy order) throws IOException } static Project readProject(PlanStreamInput in) throws IOException { - return new Project(Source.readFrom(in), in.readLogicalPlanNode(), readNamedExpressions(in)); + return new Project(Source.readFrom(in), in.readLogicalPlanNode(), in.readNamedWriteableCollectionAsList(NamedExpression.class)); } static void writeProject(PlanStreamOutput out, Project project) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(project.child()); - writeNamedExpressions(out, project.projections()); + out.writeNamedWriteableCollection(project.projections()); } static TopN readTopN(PlanStreamInput in) throws IOException { @@ -1054,29 +1080,9 @@ static void writeTopN(PlanStreamOutput out, TopN topN) throws IOException { out.writeExpression(topN.limit()); } - // - // -- Attributes - // - - private static List readNamedExpressions(PlanStreamInput in) throws IOException { - return in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readNamedExpression)); - } - - static void writeNamedExpressions(PlanStreamOutput out, List namedExpressions) throws IOException { - out.writeCollection(namedExpressions, writerFromPlanWriter(PlanStreamOutput::writeNamedExpression)); - } - - private static List readAliases(PlanStreamInput in) throws IOException { - return in.readCollectionAsList(readerFromPlanReader(PlanNamedTypes::readAlias)); - } - - static void writeAliases(PlanStreamOutput out, List aliases) throws IOException { - out.writeCollection(aliases, writerFromPlanWriter(PlanNamedTypes::writeAlias)); - } - // -- BinaryComparison - static EsqlBinaryComparison readBinComparison(PlanStreamInput in, String name) throws IOException { + public static EsqlBinaryComparison readBinComparison(PlanStreamInput in, String name) throws IOException { var source = Source.readFrom(in); EsqlBinaryComparison.BinaryComparisonOperation operation = EsqlBinaryComparison.BinaryComparisonOperation.readFromStream(in); var left = in.readExpression(); @@ -1086,7 +1092,7 @@ static EsqlBinaryComparison readBinComparison(PlanStreamInput in, String name) t return operation.buildNewInstance(source, left, right); } - static void writeBinComparison(PlanStreamOutput out, EsqlBinaryComparison binaryComparison) throws IOException { + public static void writeBinComparison(PlanStreamOutput out, EsqlBinaryComparison binaryComparison) throws IOException { binaryComparison.source().writeTo(out); binaryComparison.getFunctionType().writeTo(out); out.writeExpression(binaryComparison.left()); @@ -1552,6 +1558,18 @@ static void writeLeft(PlanStreamOutput out, 
Left left) throws IOException { out.writeExpression(fields.get(1)); } + static Repeat readRepeat(PlanStreamInput in) throws IOException { + return new Repeat(Source.readFrom(in), in.readExpression(), in.readExpression()); + } + + static void writeRepeat(PlanStreamOutput out, Repeat repeat) throws IOException { + repeat.source().writeTo(out); + List fields = repeat.children(); + assert fields.size() == 2; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + } + static Right readRight(PlanStreamInput in) throws IOException { return new Right(Source.readFrom(in), in.readExpression(), in.readExpression()); } @@ -1667,28 +1685,6 @@ static void writeMvConcat(PlanStreamOutput out, MvConcat fn) throws IOException out.writeExpression(fn.right()); } - // -- NamedExpressions - - static Alias readAlias(PlanStreamInput in) throws IOException { - return new Alias( - Source.readFrom(in), - in.readString(), - in.readOptionalString(), - in.readNamed(Expression.class), - NameId.readFrom(in), - in.readBoolean() - ); - } - - static void writeAlias(PlanStreamOutput out, Alias alias) throws IOException { - Source.EMPTY.writeTo(out); - out.writeString(alias.name()); - out.writeOptionalString(alias.qualifier()); - out.writeExpression(alias.child()); - alias.id().writeTo(out); - out.writeBoolean(alias.synthetic()); - } - // -- Expressions (other) static Literal readLiteral(PlanStreamInput in) throws IOException { @@ -1857,4 +1853,16 @@ static void writeMvZip(PlanStreamOutput out, MvZip fn) throws IOException { out.writeExpression(fields.get(1)); out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); } + + static MvAppend readMvAppend(PlanStreamInput in) throws IOException { + return new MvAppend(Source.readFrom(in), in.readExpression(), in.readExpression()); + } + + static void writeMvAppend(PlanStreamOutput out, MvAppend fn) throws IOException { + Source.EMPTY.writeTo(out); + List fields = fn.children(); + assert fields.size() == 2; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index e7f1fbd6e1460..0b671d6b90c7e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; @@ -93,14 +92,11 @@ public PhysicalPlan readOptionalPhysicalPlanNode() throws IOException { return readOptionalNamed(PhysicalPlan.class); } + @Override public Expression readExpression() throws IOException { return readNamed(Expression.class); } - public NamedExpression readNamedExpression() throws IOException { - return readNamed(NamedExpression.class); - } - public T readNamed(Class type) throws IOException { String name = readString(); @SuppressWarnings("unchecked") diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 05dc7ab919868..45662d13e2618 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -20,9 +20,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; @@ -35,7 +35,7 @@ * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. */ -public final class PlanStreamOutput extends StreamOutput { +public final class PlanStreamOutput extends StreamOutput implements org.elasticsearch.xpack.esql.core.util.PlanStreamOutput { /** * Cache of written blocks. We use an {@link IdentityHashMap} for this @@ -76,7 +76,7 @@ public PlanStreamOutput( } public void writeLogicalPlanNode(LogicalPlan logicalPlan) throws IOException { - assert logicalPlan.children().size() <= 1; + assert logicalPlan.children().size() <= 1 || (logicalPlan instanceof Join && logicalPlan.children().size() == 2); writeNamed(LogicalPlan.class, logicalPlan); } @@ -94,14 +94,11 @@ public void writeOptionalPhysicalPlanNode(PhysicalPlan physicalPlan) throws IOEx } } + @Override public void writeExpression(Expression expression) throws IOException { writeNamed(Expression.class, expression); } - public void writeNamedExpression(NamedExpression namedExpression) throws IOException { - writeNamed(NamedExpression.class, namedExpression); - } - public void writeOptionalExpression(Expression expression) throws IOException { if (expression == null) { writeBoolean(false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 2bcbbec1a8982..c5f96988a2ed5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; @@ -41,6 +40,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import 
org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -124,7 +124,7 @@ public LogicalPlan apply(LogicalPlan plan, LocalLogicalOptimizerContext localLog } private LogicalPlan missingToNull(LogicalPlan plan, SearchStats stats) { - if (plan instanceof EsRelation) { + if (plan instanceof EsRelation || plan instanceof LocalRelation) { return plan; } @@ -136,7 +136,7 @@ private LogicalPlan missingToNull(LogicalPlan plan, SearchStats stats) { else if (plan instanceof Project project) { var projections = project.projections(); List newProjections = new ArrayList<>(projections.size()); - Map nullLiteral = Maps.newLinkedHashMapWithExpectedSize(DataTypes.types().size()); + Map nullLiteral = Maps.newLinkedHashMapWithExpectedSize(DataType.types().size()); for (NamedExpression projection : projections) { if (projection instanceof FieldAttribute f && stats.exists(f.qualifiedName()) == false) { @@ -276,7 +276,7 @@ protected void aggOutput(NamedExpression agg, AggregateFunction aggFunc, BlockFa for (Attribute o : output) { DataType dataType = o.dataType(); // boolean right now is used for the internal #seen so always return true - var value = dataType == DataTypes.BOOLEAN ? true + var value = dataType == DataType.BOOLEAN ? true // look for count(literal) with literal != null : aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L // otherwise nullify diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index d1eb8c32dfcb0..5eb024d410992 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -34,7 +34,7 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.core.rule.Rule; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Queries; import org.elasticsearch.xpack.esql.core.util.Queries.Clause; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -252,7 +252,7 @@ public static boolean canPushToSource(Expression exp, Predicate } else if (exp instanceof UnaryScalarFunction usf) { if (usf instanceof RegexMatch || usf instanceof IsNull || usf instanceof IsNotNull) { if (usf instanceof IsNull || usf instanceof IsNotNull) { - if (usf.field() instanceof FieldAttribute fa && fa.dataType().equals(DataTypes.TEXT)) { + if (usf.field() instanceof FieldAttribute fa && fa.dataType().equals(DataType.TEXT)) { return true; } } @@ -444,7 +444,7 @@ public static boolean hasIdenticalDelegate(FieldAttribute attr, SearchStats stat public static boolean isPushableFieldAttribute(Expression exp, Predicate hasIdenticalDelegate) { if (exp instanceof FieldAttribute fa && fa.getExactInfo().hasExact() && isAggregatable(fa)) { - return fa.dataType() != DataTypes.TEXT || hasIdenticalDelegate.test(fa); + return fa.dataType() != DataType.TEXT || hasIdenticalDelegate.test(fa); } return false; } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 7beb3aca05e74..4e2cb2c8223e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -32,12 +32,10 @@ import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.StringPattern; import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.ConstantFolding; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PruneLiteralsInOrderBy; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.SetAsOptimized; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; @@ -47,7 +45,7 @@ import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.core.rule.Rule; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -61,15 +59,25 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.optimizer.rules.BooleanFunctionEqualsElimination; +import org.elasticsearch.xpack.esql.optimizer.rules.CombineDisjunctionsToIn; +import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import org.elasticsearch.xpack.esql.optimizer.rules.PropagateEquals; +import org.elasticsearch.xpack.esql.optimizer.rules.PruneLiteralsInOrderBy; +import org.elasticsearch.xpack.esql.optimizer.rules.SetAsOptimized; import org.elasticsearch.xpack.esql.optimizer.rules.SimplifyComparisonsArithmetics; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import 
org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -119,6 +127,7 @@ protected static Batch substitutions() { return new Batch<>( "Substitutions", Limiter.ONCE, + new ReplaceLookupWithJoin(), new RemoveStatsOverride(), // first extract nested expressions inside aggs new ReplaceStatsNestedExpressionWithEval(), @@ -153,10 +162,10 @@ protected static Batch operators() { new BooleanSimplification(), new LiteralsOnTheRight(), // needs to occur before BinaryComparison combinations (see class) - new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.PropagateEquals(), + new PropagateEquals(), new PropagateNullable(), - new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination(), - new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.CombineDisjunctionsToIn(), + new BooleanFunctionEqualsElimination(), + new CombineDisjunctionsToIn(), new SimplifyComparisonsArithmetics(EsqlDataTypes::areCompatible), // prune/elimination new PruneFilters(), @@ -656,6 +665,11 @@ protected LogicalPlan rule(Limit limit) { } } } + } else if (limit.child() instanceof Join join) { + if (join.config().type() == JoinType.LEFT && join.right() instanceof LocalRelation) { + // This is a hash join from something like a lookup. + return join.replaceChildren(limit.replaceChild(join.left()), join.right()); + } } return limit; } @@ -1259,6 +1273,19 @@ protected LogicalPlan rule(Limit plan) { } } + private static class ReplaceLookupWithJoin extends OptimizerRules.OptimizerRule { + + ReplaceLookupWithJoin() { + super(TransformDirection.UP); + } + + @Override + protected LogicalPlan rule(Lookup lookup) { + // left join between the main relation and the local, lookup relation + return new Join(lookup.source(), lookup.child(), lookup.localRelation(), lookup.joinConfig()); + } + } + /** * This adds an explicit TopN node to a plan that only has an OrderBy right before Lucene. 
* To date, the only known use case that "needs" this is a query of the form @@ -1286,14 +1313,35 @@ static class AddDefaultTopN extends ParameterizedOptimizerRule> { + + ReplaceRegexMatch() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN); + } + + @Override + public Expression rule(RegexMatch regexMatch) { + Expression e = regexMatch; + StringPattern pattern = regexMatch.pattern(); + if (pattern.matchesAll()) { + e = new IsNotNull(e.source(), regexMatch.field()); + } else { + String match = pattern.exactMatch(); + if (match != null) { + Literal literal = new Literal(regexMatch.source(), match, DataType.KEYWORD); + e = regexToEquals(regexMatch, literal); + } + } + return e; + } protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { return new Equals(regexMatch.source(), regexMatch.field(), literal); @@ -1756,7 +1804,7 @@ private static LogicalPlan normalize(Aggregate aggregate, AttributeMap { - CombineDisjunctionsToIn() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); - } - - protected In createIn(Expression key, List values, ZoneId zoneId) { - return new In(key.source(), key, values); - } - - protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { - return new Equals(k.source(), k, v.iterator().next(), finalZoneId); - } - - @Override - protected Expression rule(Or or) { - Expression e = or; - // look only at equals and In - List exps = splitOr(e); - - Map> found = new LinkedHashMap<>(); - ZoneId zoneId = null; - List ors = new LinkedList<>(); - - for (Expression exp : exps) { - if (exp instanceof Equals eq) { - // consider only equals against foldables - if (eq.right().foldable()) { - found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); - } else { - ors.add(exp); - } - if (zoneId == null) { - zoneId = eq.zoneId(); - } - } else if (exp instanceof In in) { - found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); - if (zoneId == null) { - zoneId = in.zoneId(); - } - } else { - ors.add(exp); - } - } - - if (found.isEmpty() == false) { - // combine equals alongside the existing ors - final ZoneId finalZoneId = zoneId; - found.forEach( - (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } - ); - - // TODO: this makes a QL `or`, not an ESQL `or` - Expression combineOr = combineOr(ors); - // check the result semantically since the result might different in order - // but be actually the same which can trigger a loop - // e.g. a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle - if (e.semanticEquals(combineOr) == false) { - e = combineOr; - } - } - - return e; - } - } - - /** - * This rule must always be placed after {@link org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.LiteralsOnTheRight} - * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
- */ - public static final class BooleanFunctionEqualsElimination extends - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { - - BooleanFunctionEqualsElimination() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); - } - - @Override - protected Expression rule(BinaryComparison bc) { - if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { - // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant - - // TODO: Replace use of QL Not with ESQL Not - if (TRUE.equals(bc.right())) { - return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); - } - if (FALSE.equals(bc.right())) { - return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); - } - } - - return bc; - } - } - - /** - * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. - * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. - * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. - * - * Since this rule can eliminate Ranges and BinaryComparisons, it should be applied before - * {@link org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.CombineBinaryComparisons}. - * - * This rule doesn't perform any promotion of {@link BinaryComparison}s, that is handled by - * {@link org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.CombineBinaryComparisons} on purpose as the resulting Range might - * be foldable (which is picked by the folding rule on the next run). - */ - public static final class PropagateEquals extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule< - BinaryLogic> { - - PropagateEquals() { - super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN); - } - - public Expression rule(BinaryLogic e) { - if (e instanceof And) { - return propagate((And) e); - } else if (e instanceof Or) { - return propagate((Or) e); - } - return e; - } - - // combine conjunction - private static Expression propagate(And and) { - List ranges = new ArrayList<>(); - // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; - // the others go into the general 'exps'. 
- // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet - List equals = new ArrayList<>(); - List notEquals = new ArrayList<>(); - List inequalities = new ArrayList<>(); - List exps = new ArrayList<>(); - - boolean changed = false; - - for (Expression ex : Predicates.splitAnd(and)) { - if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof Equals otherEq) { - // equals on different values evaluate to FALSE - // ignore date/time fields as equality comparison might actually be a range check - if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) { - for (BinaryComparison eq : equals) { - if (otherEq.left().semanticEquals(eq.left())) { - Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); - if (comp != null) { - // var cannot be equal to two different values at the same time - if (comp != 0) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - } - } - equals.add(otherEq); - } else { - exps.add(otherEq); - } - } else if (ex instanceof GreaterThan - || ex instanceof GreaterThanOrEqual - || ex instanceof LessThan - || ex instanceof LessThanOrEqual) { - BinaryComparison bc = (BinaryComparison) ex; - if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals otherNotEq) { - if (otherNotEq.right().foldable()) { - notEquals.add(otherNotEq); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - // check - for (BinaryComparison eq : equals) { - Object eqValue = eq.right().fold(); - - for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { - Range range = iterator.next(); - - if (range.value().semanticEquals(eq.left())) { - // if equals is outside the interval, evaluate the whole expression to FALSE - if (range.lower().foldable()) { - Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); - if (compare != null && ( - // eq outside the lower boundary - compare > 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeLower() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - if (range.upper().foldable()) { - Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); - if (compare != null && ( - // eq outside the upper boundary - compare < 0 || - // eq matches the boundary but should not be included - (compare == 0 && range.includeUpper() == false))) { - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - - // it's in the range and thus, remove it - iterator.remove(); - changed = true; - } - } - - // evaluate all NotEquals against the Equal - for (Iterator iter = notEquals.iterator(); iter.hasNext();) { - NotEquals neq = iter.next(); - if (eq.left().semanticEquals(neq.left())) { - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - if (comp != null) { - if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } else { // clashing and redundant: a = 1 AND a != 2 - iter.remove(); - changed = true; - } - } - } - } - - // evaluate all inequalities against the Equal - for (Iterator iter = inequalities.iterator(); iter.hasNext();) { - BinaryComparison bc = iter.next(); - if (eq.left().semanticEquals(bc.left())) { - Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); - if (compare != null) { - if 
(bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a </<= ? - if ((compare == 0 && bc instanceof LessThan) || // a = 2 AND a < 2 - 0 < compare) { // a = 2 AND a </<= 1 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { // a = 2 AND a >/>= ? - if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 - compare < 0) { // a = 2 AND a >/>= 3 - return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); - } - } - - iter.remove(); - changed = true; - } - } - } - - return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; - } - - // combine disjunction: - // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 - // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop - // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop - // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 - private static Expression propagate(Or or) { - List<Expression> exps = new ArrayList<>(); - List<Equals> equals = new ArrayList<>(); // foldable right term Equals - List<NotEquals> notEquals = new ArrayList<>(); // foldable right term NotEquals - List<Range> ranges = new ArrayList<>(); - List<BinaryComparison> inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparision - - // split expressions by type - for (Expression ex : Predicates.splitOr(or)) { - if (ex instanceof Equals eq) { - if (eq.right().foldable()) { - equals.add(eq); - } else { - exps.add(ex); - } - } else if (ex instanceof NotEquals neq) { - if (neq.right().foldable()) { - notEquals.add(neq); - } else { - exps.add(ex); - } - } else if (ex instanceof Range) { - ranges.add((Range) ex); - } else if (ex instanceof BinaryComparison bc) { - if (bc.right().foldable()) { - inequalities.add(bc); - } else { - exps.add(ex); - } - } else { - exps.add(ex); - } - } - - boolean updated = false; // has the expression been modified? - - // evaluate the impact of each Equal over the different types of Expressions - for (Iterator<Equals> iterEq = equals.iterator(); iterEq.hasNext();) { - Equals eq = iterEq.next(); - Object eqValue = eq.right().fold(); - boolean removeEquals = false; - - // Equals OR NotEquals - for (NotEquals neq : notEquals) { - if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... - Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); - if (comp != null) { - if (comp == 0) { // a = 2 OR a != 2 -> TRUE - return TRUE; - } else { // a = 2 OR a != 5 -> a != 5 - removeEquals = true; - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Range - for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop - Range range = ranges.get(i); - if (eq.left().semanticEquals(range.value())) { - Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; - Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; - - if (lowerComp != null && lowerComp == 0) { - if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - true, - range.upper(), - range.includeUpper(), - range.zoneId() - ) - ); - } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? - removeEquals = true; // update range with lower equality instead or simply superfluous - break; - } else if (upperComp != null && upperComp == 0) { - if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ?
< a <= 2 - ranges.set( - i, - new Range( - range.source(), - range.value(), - range.lower(), - range.includeLower(), - range.upper(), - true, - range.zoneId() - ) - ); - } // else : a = 2 OR ? < a <= 2 -> ? < a <= 2 - removeEquals = true; // update range with upper equality instead - break; - } else if (lowerComp != null && upperComp != null) { - if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 - removeEquals = true; // equality is superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - continue; - } - - // Equals OR Inequality - for (int i = 0; i < inequalities.size(); i++) { - BinaryComparison bc = inequalities.get(i); - if (eq.left().semanticEquals(bc.left())) { - Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); - if (comp != null) { - if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - if (comp < 0) { // a = 1 OR a > 2 -> nop - continue; - } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 - inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (0 < comp || bc instanceof GreaterThanOrEqual) : - // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 - - removeEquals = true; // update range with equality instead or simply superfluous - break; - } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - if (comp > 0) { // a = 2 OR a < 1 -> nop - continue; - } - if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 - inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); - } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 - removeEquals = true; // update range with equality instead or simply superfluous - break; - } - } - } - } - if (removeEquals) { - iterEq.remove(); - updated = true; - } - } - - return updated ? 
Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java index 17ded5cf44c09..a0a3874a2c2de 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.rule.RuleExecutor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Project; @@ -28,6 +29,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; @@ -117,6 +119,12 @@ public PhysicalPlan apply(PhysicalPlan plan) { if (p instanceof MvExpandExec mvee) { attributes.remove(mvee.expanded()); } + if (p instanceof HashJoinExec join) { + attributes.removeAll(join.addedFields()); + for (Equals cond : join.conditions()) { + attributes.remove(cond.right()); + } + } if (p instanceof EnrichExec ee) { for (NamedExpression enrichField : ee.enrichFields()) { // TODO: why is this different then the remove above? diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java new file mode 100644 index 0000000000000..cf62f9219f3c8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; +import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; + +/** + * This rule must always be placed after {@link LiteralsOnTheRight} + * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. + */ +public final class BooleanFunctionEqualsElimination extends + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { + + public BooleanFunctionEqualsElimination() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP); + } + + @Override + public Expression rule(BinaryComparison bc) { + if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { + // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant + + // TODO: Replace use of QL Not with ESQL Not + if (TRUE.equals(bc.right())) { + return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); + } + if (FALSE.equals(bc.right())) { + return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); + } + } + + return bc; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java new file mode 100644 index 0000000000000..5cc3184d9ea70 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; + +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitOr; + +/** + * Combine disjunctions on the same field into an In expression. + * This rule looks for both simple equalities: + * 1. a == 1 OR a == 2 becomes a IN (1, 2) + * and combinations of In + * 2. a == 1 OR a IN (2) becomes a IN (1, 2) + * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) + *

+ * This rule does NOT check for type compatibility as that phase has already
+ * been verified in the analyzer.
+ */
+public class CombineDisjunctionsToIn extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<Or> {
+    public CombineDisjunctionsToIn() {
+        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP);
+    }
+
+    protected In createIn(Expression key, List<Expression> values, ZoneId zoneId) {
+        return new In(key.source(), key, values);
+    }
+
+    protected Equals createEquals(Expression k, Set<Expression> v, ZoneId finalZoneId) {
+        return new Equals(k.source(), k, v.iterator().next(), finalZoneId);
+    }
+
+    @Override
+    public Expression rule(Or or) {
+        Expression e = or;
+        // look only at equals and In
+        List<Expression> exps = splitOr(e);
+
+        Map<Expression, Set<Expression>> found = new LinkedHashMap<>();
+        ZoneId zoneId = null;
+        List<Expression> ors = new LinkedList<>();
+
+        for (Expression exp : exps) {
+            if (exp instanceof Equals eq) {
+                // consider only equals against foldables
+                if (eq.right().foldable()) {
+                    found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right());
+                } else {
+                    ors.add(exp);
+                }
+                if (zoneId == null) {
+                    zoneId = eq.zoneId();
+                }
+            } else if (exp instanceof In in) {
+                found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list());
+                if (zoneId == null) {
+                    zoneId = in.zoneId();
+                }
+            } else {
+                ors.add(exp);
+            }
+        }
+
+        if (found.isEmpty() == false) {
+            // combine equals alongside the existing ors
+            final ZoneId finalZoneId = zoneId;
+            found.forEach(
+                (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); }
+            );
+
+            // TODO: this makes a QL `or`, not an ESQL `or`
+            Expression combineOr = combineOr(ors);
+            // check the result semantically since the result might be different in order
+            // but be actually the same which can trigger a loop
+            // e.g. a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle
+            if (e.semanticEquals(combineOr) == false) {
+                e = combineOr;
+            }
+        }
+
+        return e;
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java
new file mode 100644
index 0000000000000..f2638333c9601
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.optimizer.rules;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.Literal;
+import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
+
+public final class ConstantFolding extends OptimizerRules.OptimizerExpressionRule<Expression> {
+
+    public ConstantFolding() {
+        super(OptimizerRules.TransformDirection.DOWN);
+    }
+
+    @Override
+    public Expression rule(Expression e) {
+        return e.foldable() ?
Literal.of(e) : e; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java new file mode 100644 index 0000000000000..528fe65766972 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; + +public final class LiteralsOnTheRight extends OptimizerRules.OptimizerExpressionRule> { + + public LiteralsOnTheRight() { + super(OptimizerRules.TransformDirection.UP); + } + + @Override + public BinaryOperator rule(BinaryOperator be) { + return be.left() instanceof Literal && (be.right() instanceof Literal) == false ? be.swapLeftAndRight() : be; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java new file mode 100644 index 0000000000000..5f08363abdbaf --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java @@ -0,0 +1,348 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.core.expression.predicate.Range; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; + +/** + * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. + * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. + * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. + */ +public final class PropagateEquals extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule { + + public PropagateEquals() { + super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN); + } + + public Expression rule(BinaryLogic e) { + if (e instanceof And) { + return propagate((And) e); + } else if (e instanceof Or) { + return propagate((Or) e); + } + return e; + } + + // combine conjunction + private static Expression propagate(And and) { + List ranges = new ArrayList<>(); + // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; + // the others go into the general 'exps'. 
+ // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet + List equals = new ArrayList<>(); + List notEquals = new ArrayList<>(); + List inequalities = new ArrayList<>(); + List exps = new ArrayList<>(); + + boolean changed = false; + + for (Expression ex : Predicates.splitAnd(and)) { + if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof Equals otherEq) { + // equals on different values evaluate to FALSE + // ignore date/time fields as equality comparison might actually be a range check + if (otherEq.right().foldable() && DataType.isDateTime(otherEq.left().dataType()) == false) { + for (BinaryComparison eq : equals) { + if (otherEq.left().semanticEquals(eq.left())) { + Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); + if (comp != null) { + // var cannot be equal to two different values at the same time + if (comp != 0) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + } + } + equals.add(otherEq); + } else { + exps.add(otherEq); + } + } else if (ex instanceof GreaterThan + || ex instanceof GreaterThanOrEqual + || ex instanceof LessThan + || ex instanceof LessThanOrEqual) { + BinaryComparison bc = (BinaryComparison) ex; + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals otherNotEq) { + if (otherNotEq.right().foldable()) { + notEquals.add(otherNotEq); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + // check + for (BinaryComparison eq : equals) { + Object eqValue = eq.right().fold(); + + for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { + Range range = iterator.next(); + + if (range.value().semanticEquals(eq.left())) { + // if equals is outside the interval, evaluate the whole expression to FALSE + if (range.lower().foldable()) { + Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); + if (compare != null && ( + // eq outside the lower boundary + compare > 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeLower() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + if (range.upper().foldable()) { + Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); + if (compare != null && ( + // eq outside the upper boundary + compare < 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeUpper() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } + } + + // it's in the range and thus, remove it + iterator.remove(); + changed = true; + } + } + + // evaluate all NotEquals against the Equal + for (Iterator iter = notEquals.iterator(); iter.hasNext();) { + NotEquals neq = iter.next(); + if (eq.left().semanticEquals(neq.left())) { + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 + return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN); + } else { // clashing and redundant: a = 1 AND a != 2 + iter.remove(); + changed = true; + } + } + } + } + + // evaluate all inequalities against the Equal + for (Iterator iter = inequalities.iterator(); iter.hasNext();) { + BinaryComparison bc = iter.next(); + if (eq.left().semanticEquals(bc.left())) { + Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); + if (compare != null) { + if (bc 
instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a </<= ?
+                        if ((compare == 0 && bc instanceof LessThan) || // a = 2 AND a < 2
+                            0 < compare) { // a = 2 AND a </<= 1
+                            return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN);
+                        }
+                    } else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { // a = 2 AND a >/>= ?
+                        if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2
+                            compare < 0) { // a = 2 AND a >/>= 3
+                            return new Literal(and.source(), Boolean.FALSE, DataType.BOOLEAN);
+                        }
+                    }
+
+                    iter.remove();
+                    changed = true;
+                }
+            }
+        }
+
+        return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and;
+    }
+
+    // combine disjunction:
+    // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1
+    // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop
+    // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop
+    // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5
+    private static Expression propagate(Or or) {
+        List<Expression> exps = new ArrayList<>();
+        List<Equals> equals = new ArrayList<>(); // foldable right term Equals
+        List<NotEquals> notEquals = new ArrayList<>(); // foldable right term NotEquals
+        List<Range> ranges = new ArrayList<>();
+        List<BinaryComparison> inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparison
+
+        // split expressions by type
+        for (Expression ex : Predicates.splitOr(or)) {
+            if (ex instanceof Equals eq) {
+                if (eq.right().foldable()) {
+                    equals.add(eq);
+                } else {
+                    exps.add(ex);
+                }
+            } else if (ex instanceof NotEquals neq) {
+                if (neq.right().foldable()) {
+                    notEquals.add(neq);
+                } else {
+                    exps.add(ex);
+                }
+            } else if (ex instanceof Range) {
+                ranges.add((Range) ex);
+            } else if (ex instanceof BinaryComparison bc) {
+                if (bc.right().foldable()) {
+                    inequalities.add(bc);
+                } else {
+                    exps.add(ex);
+                }
+            } else {
+                exps.add(ex);
+            }
+        }
+
+        boolean updated = false; // has the expression been modified?
+
+        // evaluate the impact of each Equal over the different types of Expressions
+        for (Iterator<Equals> iterEq = equals.iterator(); iterEq.hasNext();) {
+            Equals eq = iterEq.next();
+            Object eqValue = eq.right().fold();
+            boolean removeEquals = false;
+
+            // Equals OR NotEquals
+            for (NotEquals neq : notEquals) {
+                if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ...
+                    Integer comp = BinaryComparison.compare(eqValue, neq.right().fold());
+                    if (comp != null) {
+                        if (comp == 0) { // a = 2 OR a != 2 -> TRUE
+                            return TRUE;
+                        } else { // a = 2 OR a != 5 -> a != 5
+                            removeEquals = true;
+                            break;
+                        }
+                    }
+                }
+            }
+            if (removeEquals) {
+                iterEq.remove();
+                updated = true;
+                continue;
+            }
+
+            // Equals OR Range
+            for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop
+                Range range = ranges.get(i);
+                if (eq.left().semanticEquals(range.value())) {
+                    Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null;
+                    Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null;
+
+                    if (lowerComp != null && lowerComp == 0) {
+                        if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ?
+                            ranges.set(
+                                i,
+                                new Range(
+                                    range.source(),
+                                    range.value(),
+                                    range.lower(),
+                                    true,
+                                    range.upper(),
+                                    range.includeUpper(),
+                                    range.zoneId()
+                                )
+                            );
+                        } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ?
+                        removeEquals = true; // update range with lower equality instead or simply superfluous
+                        break;
+                    } else if (upperComp != null && upperComp == 0) {
+                        if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2
+                            ranges.set(
+                                i,
+                                new Range(
+                                    range.source(),
+                                    range.value(),
+                                    range.lower(),
+                                    range.includeLower(),
+                                    range.upper(),
+                                    true,
+                                    range.zoneId()
+                                )
+                            );
+                        } // else : a = 2 OR ? < a <= 2 -> ? < a <= 2
+                        removeEquals = true; // update range with upper equality instead
+                        break;
+                    } else if (lowerComp != null && upperComp != null) {
+                        if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3
+                            removeEquals = true; // equality is superfluous
+                            break;
+                        }
+                    }
+                }
+            }
+            if (removeEquals) {
+                iterEq.remove();
+                updated = true;
+                continue;
+            }
+
+            // Equals OR Inequality
+            for (int i = 0; i < inequalities.size(); i++) {
+                BinaryComparison bc = inequalities.get(i);
+                if (eq.left().semanticEquals(bc.left())) {
+                    Integer comp = BinaryComparison.compare(eqValue, bc.right().fold());
+                    if (comp != null) {
+                        if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) {
+                            if (comp < 0) { // a = 1 OR a > 2 -> nop
+                                continue;
+                            } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2
+                                inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId()));
+                            } // else (0 < comp || bc instanceof GreaterThanOrEqual) :
+                              // a = 3 OR a > 2 -> a > 2; a = 2 OR a >= 2 -> a >= 2
+
+                            removeEquals = true; // update range with equality instead or simply superfluous
+                            break;
+                        } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) {
+                            if (comp > 0) { // a = 2 OR a < 1 -> nop
+                                continue;
+                            }
+                            if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2
+                                inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId()));
+                            } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2
+                            removeEquals = true; // update range with equality instead or simply superfluous
+                            break;
+                        }
+                    }
+                }
+            }
+            if (removeEquals) {
+                iterEq.remove();
+                updated = true;
+            }
+        }
+
+        return updated ? Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or;
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java
new file mode 100644
index 0000000000000..591cfe043c00d
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.expression.Order; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; + +import java.util.ArrayList; +import java.util.List; + +public final class PruneLiteralsInOrderBy extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(OrderBy ob) { + List prunedOrders = new ArrayList<>(); + + for (Order o : ob.order()) { + if (o.child().foldable()) { + prunedOrders.add(o); + } + } + + // everything was eliminated, the order isn't needed anymore + if (prunedOrders.size() == ob.order().size()) { + return ob.child(); + } + if (prunedOrders.size() > 0) { + List newOrders = new ArrayList<>(ob.order()); + newOrders.removeAll(prunedOrders); + return new OrderBy(ob.source(), ob.child(), newOrders); + } + + return ob; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java new file mode 100644 index 0000000000000..168270b68db2d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.optimizer.rules; + +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.rule.Rule; + +public final class SetAsOptimized extends Rule { + + @Override + public LogicalPlan apply(LogicalPlan plan) { + plan.forEachUp(SetAsOptimized::rule); + return plan; + } + + private static void rule(LogicalPlan plan) { + if (plan.optimized() == false) { + plan.setOptimized(); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java index 9a7ee0a587335..0d3aaaa3a9d47 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryComparisonInversible; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; @@ -148,7 +147,7 @@ final boolean isUnsafe(BiFunction typesCompatible) final Expression apply() { // force float point folding for FlP field Literal bcl = operation.dataType().isRational() - ? new Literal(bcLiteral.source(), ((Number) bcLiteral.value()).doubleValue(), DataTypes.DOUBLE) + ? 
new Literal(bcLiteral.source(), ((Number) bcLiteral.value()).doubleValue(), DataType.DOUBLE) : bcLiteral; Expression bcRightExpression = ((BinaryComparisonInversible) operation).binaryComparisonInverse() diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/package-info.java new file mode 100644 index 0000000000000..863476ba55686 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/package-info.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +/** + * ES|QL Overview and Documentation Links + * + *

+ * <h2>Major Components</h2>
+ * <ul>
+ * <li>{@link org.elasticsearch.compute} - The compute engine drives query execution
+ * <ul>
+ * <li>{@link org.elasticsearch.compute.data.Block} - fundamental unit of data. Operations vectorize over blocks.</li>
+ * <li>{@link org.elasticsearch.compute.data.Page} - Data is broken up into pages (which are collections of blocks) to
+ * manage size in memory</li>
+ * </ul>
+ * </li>
+ * <li>{@link org.elasticsearch.xpack.esql.core} - Core Utility Classes
+ * <ul>
+ * <li>{@link org.elasticsearch.xpack.esql.core.type.DataType} - ES|QL is a typed language, and all the supported data types
+ * are listed in this collection.</li>
+ * <li>{@link org.elasticsearch.xpack.esql.core.expression.Expression} - Expression is the basis for all functions in ES|QL,
+ * but see also {@link org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper}</li>
+ * </ul>
+ * </li>
+ * <li>org.elasticsearch.compute.gen - ES|QL generates code for evaluators, which are type-specific implementations of
+ * functions, designed to run over a {@link org.elasticsearch.compute.data.Block}</li>
+ * <li>{@link org.elasticsearch.xpack.esql.session.EsqlSession} - manages state across a query</li>
+ * <li>{@link org.elasticsearch.xpack.esql.expression.function.scalar} - Guide to writing scalar functions</li>
+ * <li>{@link org.elasticsearch.xpack.esql.analysis.Analyzer} - The first step in query processing</li>
+ * <li>{@link org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer} - Coordinator level logical optimizations</li>
+ * <li>{@link org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer} - Data node level logical optimizations</li>
+ * <li>{@link org.elasticsearch.xpack.esql.action.RestEsqlQueryAction} - REST API entry point</li>
+ * </ul>
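+ *
+ * <h3>Query lifecycle (illustrative sketch)</h3>
+ * <p>A rough, simplified view of how the components above cooperate on one query; the call shapes
+ * below are editorial shorthand, not the literal API:</p>
+ * <pre>{@code
+ * // RestEsqlQueryAction receives the request and hands it to an EsqlSession, which drives:
+ * LogicalPlan parsed    = parser.createStatement(query);            // parse the ES|QL text
+ * LogicalPlan analyzed  = analyzer.analyze(parsed);                 // Analyzer: resolve fields and types
+ * LogicalPlan optimized = logicalPlanOptimizer.optimize(analyzed);  // coordinator-level rewrites
+ * // physical planning and the LocalLogicalPlanOptimizer run on the data nodes, and the compute
+ * // engine then executes the plan as operators over Pages of Blocks
+ * }</pre>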
+ */ + +package org.elasticsearch.xpack.esql; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index a94d449b9e401..a32ae9167aeb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -38,7 +38,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.DateUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.Order; @@ -78,8 +77,8 @@ import static org.elasticsearch.xpack.esql.core.parser.ParserUtils.source; import static org.elasticsearch.xpack.esql.core.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.esql.core.parser.ParserUtils.visitList; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; @@ -125,7 +124,7 @@ protected List expressions(List context @Override public Literal visitBooleanValue(EsqlBaseParser.BooleanValueContext ctx) { Source source = source(ctx); - return new Literal(source, ctx.TRUE() != null, DataTypes.BOOLEAN); + return new Literal(source, ctx.TRUE() != null, DataType.BOOLEAN); } @Override @@ -134,7 +133,7 @@ public Literal visitDecimalValue(EsqlBaseParser.DecimalValueContext ctx) { String text = ctx.getText(); try { - return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); + return new Literal(source, StringUtils.parseDouble(text), DataType.DOUBLE); } catch (InvalidArgumentException iae) { throw new ParsingException(source, iae.getMessage()); } @@ -151,7 +150,7 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { } catch (InvalidArgumentException siae) { // if it's too large, then quietly try to parse as a float instead try { - return new Literal(source, EsqlDataTypeConverter.stringToDouble(text), DataTypes.DOUBLE); + return new Literal(source, EsqlDataTypeConverter.stringToDouble(text), DataType.DOUBLE); } catch (InvalidArgumentException ignored) {} throw new ParsingException(source, siae.getMessage()); @@ -161,13 +160,13 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { DataType type; if (number instanceof BigInteger bi) { val = asLongUnsigned(bi); - type = DataTypes.UNSIGNED_LONG; + type = DataType.UNSIGNED_LONG; } else if (number.intValue() == number.longValue()) { // try to downsize to int if possible (since that's the most common type) val = number.intValue(); - type = DataTypes.INTEGER; + type = DataType.INTEGER; } else { val = number.longValue(); - type = DataTypes.LONG; + type = DataType.LONG; } return new Literal(source, val, type); } @@ -176,25 +175,23 @@ public Literal 
visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { public Object visitNumericArrayLiteral(EsqlBaseParser.NumericArrayLiteralContext ctx) { Source source = source(ctx); List numbers = visitList(this, ctx.numericValue(), Literal.class); - if (numbers.stream().anyMatch(l -> l.dataType() == DataTypes.DOUBLE)) { - return new Literal(source, mapNumbers(numbers, (no, dt) -> no.doubleValue()), DataTypes.DOUBLE); + if (numbers.stream().anyMatch(l -> l.dataType() == DataType.DOUBLE)) { + return new Literal(source, mapNumbers(numbers, (no, dt) -> no.doubleValue()), DataType.DOUBLE); } - if (numbers.stream().anyMatch(l -> l.dataType() == DataTypes.UNSIGNED_LONG)) { + if (numbers.stream().anyMatch(l -> l.dataType() == DataType.UNSIGNED_LONG)) { return new Literal( source, mapNumbers( numbers, - (no, dt) -> dt == DataTypes.UNSIGNED_LONG - ? no.longValue() - : bigIntegerToUnsignedLong(BigInteger.valueOf(no.longValue())) + (no, dt) -> dt == DataType.UNSIGNED_LONG ? no.longValue() : bigIntegerToUnsignedLong(BigInteger.valueOf(no.longValue())) ), - DataTypes.UNSIGNED_LONG + DataType.UNSIGNED_LONG ); } - if (numbers.stream().anyMatch(l -> l.dataType() == DataTypes.LONG)) { - return new Literal(source, mapNumbers(numbers, (no, dt) -> no.longValue()), DataTypes.LONG); + if (numbers.stream().anyMatch(l -> l.dataType() == DataType.LONG)) { + return new Literal(source, mapNumbers(numbers, (no, dt) -> no.longValue()), DataType.LONG); } - return new Literal(source, mapNumbers(numbers, (no, dt) -> no.intValue()), DataTypes.INTEGER); + return new Literal(source, mapNumbers(numbers, (no, dt) -> no.intValue()), DataType.INTEGER); } private List mapNumbers(List numbers, BiFunction map) { @@ -203,12 +200,12 @@ private List mapNumbers(List numbers, BiFunction contexts, DataType dataType) { @@ -220,7 +217,7 @@ private Object visitArrayLiteral(ParserRuleContext ctx, List keys = new ArrayList<>(); for (var x : parser.outputKeys()) { if (x.isEmpty() == false) { - keys.add(new ReferenceAttribute(src, x, DataTypes.KEYWORD)); + keys.add(new ReferenceAttribute(src, x, DataType.KEYWORD)); } } return new Dissect(src, p, expression(ctx.primaryExpression()), new Dissect.Parser(pattern, appendSeparator, parser), keys); @@ -300,7 +301,7 @@ public PlanFactory visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); int limit = stringToInt(ctx.INTEGER_LITERAL().getText()); - return input -> new Limit(source, new Literal(source, limit, DataTypes.INTEGER), input); + return input -> new Limit(source, new Literal(source, limit, DataType.INTEGER), input); } @Override @@ -378,7 +379,7 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { source, p, mode, - new Literal(source(ctx.policyName), policyNameString, DataTypes.KEYWORD), + new Literal(source(ctx.policyName), policyNameString, DataType.KEYWORD), matchField, null, Map.of(), @@ -443,9 +444,9 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { } }); - Literal tableName = new Literal(source, ctx.tableName.getText(), DataTypes.KEYWORD); + Literal tableName = new Literal(source, ctx.tableName.getText(), DataType.KEYWORD); - throw new ParsingException(source, "LOOKUP not yet supported"); + return p -> new Lookup(source, p, tableName, matchFields, null /* localRelation will be resolved later*/); } interface PlanFactory extends Function {} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index 8500f60bd5d60..86f3e0bdf349a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.List; import java.util.Objects; @@ -55,8 +55,8 @@ public Explain(Source source, LogicalPlan query) { @Override public List output() { return List.of( - new ReferenceAttribute(Source.EMPTY, "plan", DataTypes.KEYWORD), - new ReferenceAttribute(Source.EMPTY, "type", DataTypes.KEYWORD) + new ReferenceAttribute(Source.EMPTY, "plan", DataType.KEYWORD), + new ReferenceAttribute(Source.EMPTY, "type", DataType.KEYWORD) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index d9f28eff34ad3..5a85e385da8ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.NamedExpressions; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -45,12 +44,12 @@ private List extractedFields() { private static DataType toDataType(GrokCaptureType type) { return switch (type) { - case STRING -> DataTypes.KEYWORD; - case INTEGER -> DataTypes.INTEGER; - case LONG -> DataTypes.LONG; - case FLOAT -> DataTypes.FLOAT; - case DOUBLE -> DataTypes.DOUBLE; - case BOOLEAN -> DataTypes.BOOLEAN; + case STRING -> DataType.KEYWORD; + case INTEGER -> DataType.INTEGER; + case LONG -> DataType.LONG; + case FLOAT -> DataType.FLOAT; + case DOUBLE -> DataType.DOUBLE; + case BOOLEAN -> DataType.BOOLEAN; }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java new file mode 100644 index 0000000000000..690e4595f64e5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.logical; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Looks up values from the associated {@code tables}. + * The class is supposed to be substituted by a {@link Join}. + */ +public class Lookup extends UnaryPlan { + private final Expression tableName; + /** + * References to the input fields to match against the {@link #localRelation}. + */ + private final List matchFields; + // initialized during the analysis phase for output and validation + // afterward, it is converted into a Join (BinaryPlan) hence why here it is not a child + private final LocalRelation localRelation; + private List lazyOutput; + + public Lookup( + Source source, + LogicalPlan child, + Expression tableName, + List matchFields, + @Nullable LocalRelation localRelation + ) { + super(source, child); + this.tableName = tableName; + this.matchFields = matchFields; + this.localRelation = localRelation; + } + + public Lookup(PlanStreamInput in) throws IOException { + super(Source.readFrom(in), in.readLogicalPlanNode()); + this.tableName = in.readExpression(); + this.matchFields = in.readNamedWriteableCollectionAsList(NamedExpression.class); + this.localRelation = in.readBoolean() ? 
new LocalRelation(in) : null; + } + + public void writeTo(PlanStreamOutput out) throws IOException { + source().writeTo(out); + out.writeLogicalPlanNode(child()); + out.writeExpression(tableName); + out.writeNamedWriteableCollection(matchFields); + if (localRelation == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + localRelation.writeTo(out); + } + } + + public Expression tableName() { + return tableName; + } + + public List matchFields() { + return matchFields; + } + + public LocalRelation localRelation() { + return localRelation; + } + + public JoinConfig joinConfig() { + List conditions = new ArrayList<>(matchFields.size()); + List rhsOutput = Join.makeReference(localRelation.output()); + for (NamedExpression lhs : matchFields) { + for (Attribute rhs : rhsOutput) { + if (lhs.name().equals(rhs.name())) { + conditions.add(new Equals(source(), lhs, rhs)); + break; + } + } + } + return new JoinConfig(JoinType.LEFT, matchFields, conditions); + } + + @Override + public boolean expressionsResolved() { + return tableName.resolved() && Resolvables.resolved(matchFields) && localRelation != null; + } + + @Override + public UnaryPlan replaceChild(LogicalPlan newChild) { + return new Lookup(source(), newChild, tableName, matchFields, localRelation); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Lookup::new, child(), tableName, matchFields, localRelation); + } + + @Override + public List output() { + if (lazyOutput == null) { + List rightSide = localRelation != null + ? Join.makeNullable(Join.makeReference(localRelation.output())) + : Expressions.asAttributes(matchFields); + lazyOutput = Join.mergeOutput(child().output(), rightSide, matchFields); + } + return lazyOutput; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + Lookup lookup = (Lookup) o; + return Objects.equals(tableName, lookup.tableName) + && Objects.equals(matchFields, lookup.matchFields) + && Objects.equals(localRelation, lookup.localRelation); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), tableName, matchFields, localRelation); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java new file mode 100644 index 0000000000000..81ec67a28bbfd --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; +import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class Join extends BinaryPlan { + + private final JoinConfig config; + // TODO: The matching attributes from the left and right logical plans should become part of the `expressions()` + // so that `references()` returns the attributes we actually rely on. + private List lazyOutput; + + public Join(Source source, LogicalPlan left, LogicalPlan right, JoinConfig config) { + super(source, left, right); + this.config = config; + } + + public Join(PlanStreamInput in) throws IOException { + super(Source.readFrom(in), in.readLogicalPlanNode(), in.readLogicalPlanNode()); + this.config = new JoinConfig(in); + } + + public void writeTo(PlanStreamOutput out) throws IOException { + source().writeTo(out); + out.writeLogicalPlanNode(left()); + out.writeLogicalPlanNode(right()); + config.writeTo(out); + } + + public JoinConfig config() { + return config; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Join::new, left(), right(), config); + } + + @Override + public Join replaceChildren(List newChildren) { + return new Join(source(), newChildren.get(0), newChildren.get(1), config); + } + + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new Join(source(), left, right, config); + } + + @Override + public List output() { + if (lazyOutput == null) { + lazyOutput = computeOutput(); + } + return lazyOutput; + } + + private List computeOutput() { + List right = makeReference(right().output()); + return switch (config.type()) { + case LEFT -> // right side becomes nullable + mergeOutput(left().output(), makeNullable(right), config.matchFields()); + case RIGHT -> // left side becomes nullable + mergeOutput(makeNullable(left().output()), right, config.matchFields()); + case FULL -> // both sides become nullable + mergeOutput(makeNullable(left().output()), makeNullable(right), config.matchFields()); + default -> // neither side becomes nullable + mergeOutput(left().output(), right, config.matchFields()); + }; + } + + /** + * Merge output fields, left hand side wins in name conflicts except + * for fields defined in {@link JoinConfig#matchFields()}. + */ + public static List mergeOutput( + List lhs, + List rhs, + List matchFields + ) { + List results = new ArrayList<>(lhs.size() + rhs.size()); + + for (Attribute a : lhs) { + if (rhs.contains(a) == false || matchFields.stream().anyMatch(m -> m.name().equals(a.name()))) { + results.add(a); + } + } + for (Attribute a : rhs) { + if (false == matchFields.stream().anyMatch(m -> m.name().equals(a.name()))) { + results.add(a); + } + } + return results; + } + + /** + * Make fields references, so we don't check if they exist in the index. 
+ * We do this for fields that we know don't come from the index.
+ * <p>
+ * It's important that name is returned as a *reference* here
+ * instead of a field. If it were a field we'd use SearchStats
+ * on it and discover that it doesn't exist in the index. It doesn't!
+ * We don't expect it to. It exists only in the lookup table.
+ * TODO we should rework stats so we don't have to do this
+ * </p>
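+ * <p>
+ * For example (hypothetical field name): a resolved {@code language_name} attribute coming from the
+ * lookup table is rewritten to a {@code ReferenceAttribute} with the same name, type and id, so it is
+ * never checked against the index mapping.
+ * </p>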

+ */ + public static List makeReference(List output) { + List out = new ArrayList<>(output.size()); + for (Attribute a : output) { + if (a.resolved()) { + out.add(new ReferenceAttribute(a.source(), a.name(), a.dataType(), a.qualifier(), a.nullable(), a.id(), a.synthetic())); + } else { + out.add(a); + } + } + return out; + } + + public static List makeNullable(List output) { + List out = new ArrayList<>(output.size()); + for (Attribute a : output) { + out.add(a.withNullability(Nullability.TRUE)); + } + return out; + } + + @Override + public boolean expressionsResolved() { + return config.expressionsResolved(); + } + + @Override + public boolean resolved() { + // resolve the join if + // - the children are resolved + // - the condition (if present) is resolved to a boolean + return childrenResolved() && expressionsResolved(); + } + + @Override + public int hashCode() { + return Objects.hash(config, left(), right()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Join other = (Join) obj; + return config.equals(other.config) && Objects.equals(left(), other.left()) && Objects.equals(right(), other.right()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java new file mode 100644 index 0000000000000..b5cf5d9234c6b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.List; + +/** + * Configuration for a {@code JOIN} style operation. 
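+ * <p>
+ * For example (illustrative): a {@code LOOKUP}-style left join builds {@code JoinType.LEFT}, one
+ * match field such as a hypothetical {@code language_code}, and one {@code Equals} condition per
+ * match field (see {@code Lookup#joinConfig()}).
+ * </p>
+ * @param type the join strategy, e.g. {@code JoinType.LEFT}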
+ * @param matchFields fields that are merged from the left and right relations
+ * @param conditions when these conditions are true the rows are joined
+ */
+public record JoinConfig(JoinType type, List<NamedExpression> matchFields, List<Expression> conditions) implements Writeable {
+    public JoinConfig(StreamInput in) throws IOException {
+        this(
+            JoinType.readFrom(in),
+            in.readNamedWriteableCollectionAsList(NamedExpression.class),
+            in.readCollectionAsList(i -> ((PlanStreamInput) i).readExpression())
+        );
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        type.writeTo(out);
+        out.writeNamedWriteableCollection(matchFields);
+        out.writeCollection(conditions, (o, v) -> ((PlanStreamOutput) o).writeExpression(v));
+    }
+
+    public boolean expressionsResolved() {
+        return Resolvables.resolved(matchFields) && Resolvables.resolved(conditions);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java
new file mode 100644
index 0000000000000..bd3ba43c25016
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plan.logical.join;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+
+import java.io.IOException;
+
+public enum JoinType implements Writeable {
+    INNER(0, "INNER"),
+    LEFT(1, "LEFT OUTER"),
+    RIGHT(2, "RIGHT OUTER"),
+    FULL(3, "FULL OUTER"),
+    CROSS(4, "CROSS");
+
+    private final byte id;
+    private final String name;
+
+    JoinType(int id, String name) {
+        this.id = (byte) id;
+        this.name = name;
+    }
+
+    @Override
+    public String toString() {
+        return name;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeByte(id);
+    }
+
+    public static JoinType readFrom(StreamInput in) throws IOException {
+        byte id = in.readByte();
+        return switch (id) {
+            case 0 -> INNER;
+            case 1 -> LEFT;
+            case 2 -> RIGHT;
+            case 3 -> FULL;
+            case 4 -> CROSS;
+            default -> throw new IllegalArgumentException("unsupported join [" + id + "]");
+        };
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java
index 1ef8ca49b6e37..862098621e9ee 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java
@@ -10,7 +10,10 @@
 import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Objects;
 
@@ -25,6 +28,18 @@ public LocalRelation(Source source, List<Attribute> output, LocalSupplier suppli
         this.supplier = supplier;
     }
 
+    public
LocalRelation(PlanStreamInput in) throws IOException { + super(Source.readFrom(in)); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + this.supplier = LocalSupplier.readFrom(in); + } + + public void writeTo(PlanStreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteableCollection(output); + supplier.writeTo(out); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, LocalRelation::new, output, supplier); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java index 32159ac3caea8..6356b2644e67a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java @@ -25,8 +25,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public class MetaFunctions extends LeafPlan { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index 9ff97fe47d2a9..4867d8ca77a39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; public class ShowInfo extends LeafPlan { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java index 30986f9c626da..fc43f1002d112 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.NodeUtils; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import java.util.List; @@ -28,10 +28,10 @@ import java.util.Objects; public class EsQueryExec extends LeafExec implements EstimatesRowSize { - static final EsField DOC_ID_FIELD = new EsField("_doc", DataTypes.DOC_DATA_TYPE, Map.of(), false); - static final EsField TSID_FIELD = new EsField("_tsid", DataTypes.TSID_DATA_TYPE, Map.of(), true); - static final EsField TIMESTAMP_FIELD = new EsField("@timestamp", DataTypes.DATETIME, Map.of(), true); - static final EsField INTERVAL_FIELD = new EsField("@timestamp_interval", DataTypes.DATETIME, Map.of(), true); + static final EsField DOC_ID_FIELD = new EsField("_doc", DataType.DOC_DATA_TYPE, Map.of(), false); + static final EsField 
TSID_FIELD = new EsField("_tsid", DataType.TSID_DATA_TYPE, Map.of(), true); + static final EsField TIMESTAMP_FIELD = new EsField("@timestamp", DataType.DATETIME, Map.of(), true); + static final EsField INTERVAL_FIELD = new EsField("@timestamp_interval", DataType.DATETIME, Map.of(), true); private final EsIndex index; private final IndexMode indexMode; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java new file mode 100644 index 0000000000000..7c124701fe332 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +public class HashJoinExec extends UnaryExec implements EstimatesRowSize { + private final LocalSourceExec joinData; + private final List matchFields; + /** + * Conditions that must match for rows to be joined. The {@link Equals#left()} + * is always from the child and the {@link Equals#right()} is always from the + * {@link #joinData()}. + */ + private final List conditions; + private final List output; + private AttributeSet lazyAddedFields; + + public HashJoinExec( + Source source, + PhysicalPlan child, + LocalSourceExec hashData, + List matchFields, + List conditions, + List output + ) { + super(source, child); + this.joinData = hashData; + this.matchFields = matchFields; + this.conditions = conditions; + this.output = output; + } + + public HashJoinExec(PlanStreamInput in) throws IOException { + super(Source.readFrom(in), in.readPhysicalPlanNode()); + this.joinData = new LocalSourceExec(in); + this.matchFields = in.readNamedWriteableCollectionAsList(NamedExpression.class); + this.conditions = in.readCollectionAsList(i -> (Equals) PlanNamedTypes.readBinComparison(in, "equals")); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + } + + public void writeTo(PlanStreamOutput out) throws IOException { + source().writeTo(out); + out.writePhysicalPlanNode(child()); + joinData.writeTo(out); + out.writeNamedWriteableCollection(matchFields); + out.writeCollection(conditions, (o, v) -> PlanNamedTypes.writeBinComparison(out, v)); + out.writeNamedWriteableCollection(output); + } + + public LocalSourceExec joinData() { + return joinData; + } + + public List matchFields() { + return matchFields; + } + + /** + * Conditions that must match for rows to be joined. 
The {@link Equals#left()} + * is always from the child and the {@link Equals#right()} is always from the + * {@link #joinData()}. + */ + public List conditions() { + return conditions; + } + + public Set addedFields() { + if (lazyAddedFields == null) { + lazyAddedFields = outputSet(); + lazyAddedFields.removeAll(child().output()); + } + return lazyAddedFields; + } + + @Override + public PhysicalPlan estimateRowSize(State state) { + state.add(false, output); + return this; + } + + @Override + public List output() { + return output; + } + + @Override + public HashJoinExec replaceChild(PhysicalPlan newChild) { + return new HashJoinExec(source(), newChild, joinData, matchFields, conditions, output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, HashJoinExec::new, child(), joinData, matchFields, conditions, output); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + HashJoinExec hash = (HashJoinExec) o; + return joinData.equals(hash.joinData) + && matchFields.equals(hash.matchFields) + && conditions.equals(hash.conditions) + && output.equals(hash.output); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), joinData, matchFields, conditions, output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java index 71c5955e31a1a..915e31bef7596 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java @@ -10,8 +10,11 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import java.io.IOException; import java.util.List; import java.util.Objects; @@ -26,6 +29,18 @@ public LocalSourceExec(Source source, List output, LocalSupplier supp this.supplier = supplier; } + public LocalSourceExec(PlanStreamInput in) throws IOException { + super(Source.readFrom(in)); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + this.supplier = LocalSupplier.readFrom(in); + } + + public void writeTo(PlanStreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteableCollection(output); + supplier.writeTo(out); + } + @Override public List output() { return output; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 05bc7d6b1bb8a..68e6ea4d6cadb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; @@ -44,8 +43,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; final class AggregateMapper { @@ -228,11 +227,11 @@ private static Stream isToNE(List interm // defaults to aggstate, but we'll eventually be able to remove this private static DataType toDataType(ElementType elementType) { return switch (elementType) { - case BOOLEAN -> DataTypes.BOOLEAN; - case BYTES_REF -> DataTypes.KEYWORD; - case INT -> DataTypes.INTEGER; - case LONG -> DataTypes.LONG; - case DOUBLE -> DataTypes.DOUBLE; + case BOOLEAN -> DataType.BOOLEAN; + case BYTES_REF -> DataType.KEYWORD; + case INT -> DataType.INTEGER; + case LONG -> DataType.LONG; + case DOUBLE -> DataType.DOUBLE; default -> throw new EsqlIllegalArgumentException("unsupported agg type: " + elementType); }; } @@ -242,18 +241,18 @@ private static String dataTypeToString(DataType type, Class aggClass) { if (aggClass == Count.class) { return ""; // no type distinction } - if (type.equals(DataTypes.BOOLEAN)) { + if (type.equals(DataType.BOOLEAN)) { return "Boolean"; - } else if (type.equals(DataTypes.INTEGER)) { + } else if (type.equals(DataType.INTEGER)) { return "Int"; - } else if (type.equals(DataTypes.LONG) || type.equals(DataTypes.DATETIME)) { + } else if (type.equals(DataType.LONG) || type.equals(DataType.DATETIME)) { return "Long"; - } else if (type.equals(DataTypes.DOUBLE)) { + } else if (type.equals(DataType.DOUBLE)) { return "Double"; - } else if (type.equals(DataTypes.KEYWORD) - || type.equals(DataTypes.IP) - || type.equals(DataTypes.VERSION) - || type.equals(DataTypes.TEXT)) { + } else if (type.equals(DataType.KEYWORD) + || type.equals(DataType.IP) + || type.equals(DataType.VERSION) + || type.equals(DataType.TEXT)) { return "BytesRef"; } else if (type.equals(GEO_POINT)) { return "GeoPoint"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 8873bd770a84e..349483116a0a8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; @@ -54,9 +53,9 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; 
-import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.HOUR_MINUTE_SECOND; @@ -203,7 +202,7 @@ static Query translate(BinaryComparison bc, TranslatorHandler handler) { } ZoneId zoneId = null; - if (DataTypes.isDateTime(attribute.dataType())) { + if (DataType.isDateTime(attribute.dataType())) { zoneId = bc.zoneId(); } if (bc instanceof GreaterThan) { @@ -301,28 +300,28 @@ private static boolean isInRange(DataType numericFieldDataType, DataType valueDa // Determine min/max for dataType. Use BigDecimals as doubles will have rounding errors for long/ulong. BigDecimal minValue; BigDecimal maxValue; - if (numericFieldDataType == DataTypes.BYTE) { + if (numericFieldDataType == DataType.BYTE) { minValue = BigDecimal.valueOf(Byte.MIN_VALUE); maxValue = BigDecimal.valueOf(Byte.MAX_VALUE); - } else if (numericFieldDataType == DataTypes.SHORT) { + } else if (numericFieldDataType == DataType.SHORT) { minValue = BigDecimal.valueOf(Short.MIN_VALUE); maxValue = BigDecimal.valueOf(Short.MAX_VALUE); - } else if (numericFieldDataType == DataTypes.INTEGER) { + } else if (numericFieldDataType == DataType.INTEGER) { minValue = BigDecimal.valueOf(Integer.MIN_VALUE); maxValue = BigDecimal.valueOf(Integer.MAX_VALUE); - } else if (numericFieldDataType == DataTypes.LONG) { + } else if (numericFieldDataType == DataType.LONG) { minValue = BigDecimal.valueOf(Long.MIN_VALUE); maxValue = BigDecimal.valueOf(Long.MAX_VALUE); - } else if (numericFieldDataType == DataTypes.UNSIGNED_LONG) { + } else if (numericFieldDataType == DataType.UNSIGNED_LONG) { minValue = BigDecimal.ZERO; maxValue = UNSIGNED_LONG_MAX; - } else if (numericFieldDataType == DataTypes.HALF_FLOAT) { + } else if (numericFieldDataType == DataType.HALF_FLOAT) { minValue = HALF_FLOAT_MAX.negate(); maxValue = HALF_FLOAT_MAX; - } else if (numericFieldDataType == DataTypes.FLOAT) { + } else if (numericFieldDataType == DataType.FLOAT) { minValue = BigDecimal.valueOf(-Float.MAX_VALUE); maxValue = BigDecimal.valueOf(Float.MAX_VALUE); - } else if (numericFieldDataType == DataTypes.DOUBLE || numericFieldDataType == DataTypes.SCALED_FLOAT) { + } else if (numericFieldDataType == DataType.DOUBLE || numericFieldDataType == DataType.SCALED_FLOAT) { // Scaled floats are represented as doubles in ESQL. 
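[Reviewer aside, not patch content: the isInRange translation here compares the folded literal against the field type's bounds as BigDecimal because double arithmetic rounds at the edges of the integral types, so a literal just past Long.MAX_VALUE could slip through a plain double comparison. A minimal standalone sketch of that pitfall, with illustrative names only:

    import java.math.BigDecimal;

    class RangeCheckSketch {
        // Exact comparison; no floating-point rounding.
        static boolean inRange(BigDecimal v, BigDecimal min, BigDecimal max) {
            return v.compareTo(min) >= 0 && v.compareTo(max) <= 0;
        }

        public static void main(String[] args) {
            double rounded = (double) Long.MAX_VALUE; // rounds up to 2^63, one past the real maximum
            System.out.println(rounded <= Long.MAX_VALUE); // true: the double comparison misses the overflow
            System.out.println(
                inRange(new BigDecimal(rounded), BigDecimal.valueOf(Long.MIN_VALUE), BigDecimal.valueOf(Long.MAX_VALUE))
            ); // false: the BigDecimal comparison catches it
        }
    }

End of aside; the diff continues.]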
minValue = BigDecimal.valueOf(-Double.MAX_VALUE); maxValue = BigDecimal.valueOf(Double.MAX_VALUE); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 3109575aa9c42..dc7e09dc8f174 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.operator.ColumnExtractOperator; +import org.elasticsearch.compute.operator.ColumnLoadOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.EvalOperatorFactory; @@ -28,6 +29,7 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.Operator.OperatorFactory; import org.elasticsearch.compute.operator.OutputOperator.OutputOperatorFactory; +import org.elasticsearch.compute.operator.RowInTableLookupOperator; import org.elasticsearch.compute.operator.RowOperator.RowOperatorFactory; import org.elasticsearch.compute.operator.ShowOperator; import org.elasticsearch.compute.operator.SinkOperator; @@ -61,6 +63,7 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -73,6 +76,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; @@ -93,6 +97,7 @@ import java.util.Objects; import java.util.Optional; import java.util.function.Function; +import java.util.stream.IntStream; import java.util.stream.Stream; import static java.util.Arrays.asList; @@ -218,6 +223,8 @@ else if (node instanceof EsQueryExec esQuery) { // lookups and joins else if (node instanceof EnrichExec enrich) { return planEnrich(enrich, context); + } else if (node instanceof HashJoinExec lookup) { + return planHashJoin(lookup, context); } // output else if (node instanceof OutputExec outputExec) { @@ -480,6 +487,67 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon ); } + private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerContext context) { + PhysicalOperation source = plan(join.child(), context); + int positionsChannel = source.layout.numberOfChannels(); + + Layout.Builder layoutBuilder = source.layout.builder(); + for (Attribute f : join.output()) { + if (join.child().outputSet().contains(f)) { + continue; + } + layoutBuilder.append(f); + } + Layout layout = layoutBuilder.build(); + Block[] localData = 
join.joinData().supplier().get(); + + RowInTableLookupOperator.Key[] keys = new RowInTableLookupOperator.Key[join.conditions().size()]; + int[] blockMapping = new int[join.conditions().size()]; + for (int k = 0; k < join.conditions().size(); k++) { + Equals cond = join.conditions().get(k); + Block localField = null; + for (int l = 0; l < join.joinData().output().size(); l++) { + if (join.joinData().output().get(l).name().equals((((NamedExpression) cond.right()).name()))) { + localField = localData[l]; + } + } + if (localField == null) { + throw new IllegalArgumentException("can't find local data for [" + cond.right() + "]"); + } + + NamedExpression left = (NamedExpression) cond.left(); + keys[k] = new RowInTableLookupOperator.Key(left.name(), localField); + Layout.ChannelAndType input = source.layout.get(left.id()); + blockMapping[k] = input.channel(); + } + + // Load the "positions" of each match + source = source.with(new RowInTableLookupOperator.Factory(keys, blockMapping), layout); + + // Load the "values" from each match + for (Attribute f : join.addedFields()) { + Block localField = null; + for (int l = 0; l < join.joinData().output().size(); l++) { + if (join.joinData().output().get(l).name().equals(f.name())) { + localField = localData[l]; + } + } + if (localField == null) { + throw new IllegalArgumentException("can't find local data for [" + f + "]"); + } + source = source.with( + new ColumnLoadOperator.Factory(new ColumnLoadOperator.Values(f.name(), localField), positionsChannel), + layout + ); + } + + // Drop the "positions" of the match + List projection = new ArrayList<>(); + IntStream.range(0, positionsChannel).boxed().forEach(projection::add); + IntStream.range(positionsChannel + 1, positionsChannel + 1 + join.addedFields().size()).boxed().forEach(projection::add); + return source.with(new ProjectOperatorFactory(projection), layout); + } + private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } @@ -522,6 +590,9 @@ private PhysicalOperation planProject(ProjectExec project, LocalExecutionPlanner inputId = ne.id(); } Layout.ChannelAndType input = source.layout.get(inputId); + if (input == null) { + throw new IllegalStateException("can't find input for [" + ne + "]"); + } Layout.ChannelSet channelSet = inputChannelToOutputIds.get(input.channel()); if (channelSet == null) { channelSet = new Layout.ChannelSet(new HashSet<>(), input.type()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 12052b92432cf..9518954f78c64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -9,12 +9,15 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; +import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; +import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -25,6 +28,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; @@ -37,6 +42,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; @@ -47,6 +53,9 @@ import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; +import java.util.ArrayList; +import java.util.List; + import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.PARTIAL; @@ -113,6 +122,24 @@ public PhysicalPlan map(LogicalPlan p) { return map(ua, child); } + if (p instanceof BinaryPlan bp) { + var left = map(bp.left()); + var right = map(bp.right()); + + if (left instanceof FragmentExec) { + if (right instanceof FragmentExec) { + throw new EsqlIllegalArgumentException("can't plan binary [" + p.nodeName() + "]"); + } + // in case of a fragment, push to it any current streaming operator + return new FragmentExec(p); + } + if (right instanceof FragmentExec) { + // in case of a fragment, push to it any current streaming operator + return new FragmentExec(p); + } + return map(bp, left, right); + } + throw new EsqlIllegalArgumentException("unsupported logical plan node [" + p.nodeName() + "]"); } @@ -181,7 +208,7 @@ private PhysicalPlan map(UnaryPlan p, PhysicalPlan child) { return map(aggregate, child); } - throw new EsqlIllegalArgumentException("unsupported unary logical plan node [" + p.nodeName() + "]"); + throw new EsqlIllegalArgumentException("unsupported logical plan node [" + p.nodeName() + "]"); } private PhysicalPlan map(Aggregate aggregate, PhysicalPlan child) { @@ -239,4 +266,32 @@ private PhysicalPlan addExchangeForFragment(LogicalPlan logical, PhysicalPlan ch } return child; } + + private PhysicalPlan map(BinaryPlan p, PhysicalPlan lhs, PhysicalPlan rhs) { + if (p instanceof Join join) { + PhysicalPlan hash = tryHashJoin(join, lhs, rhs); + if (hash != null) { + return hash; + } + } + throw new EsqlIllegalArgumentException("unsupported logical plan node [" + p.nodeName() + "]"); + } + + private PhysicalPlan tryHashJoin(Join join, PhysicalPlan lhs, PhysicalPlan rhs) { + if (join.config().type() != JoinType.LEFT) { + return null; + } + List conditions = new ArrayList<>(join.config().conditions().size()); + for (Expression cond : join.config().conditions()) { + if (cond instanceof Equals eq) { + conditions.add(eq); + } 
else { + return null; + } + } + if (rhs instanceof LocalSourceExec local) { + return new HashJoinExec(join.source(), lhs, local, join.config().matchFields(), conditions, join.output()); + } + return null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 39b641a872c58..cc28839fd6575 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -28,7 +28,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.Queries; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; @@ -240,37 +239,37 @@ public static ElementType toElementType(DataType dataType) { * For example, spatial types can be extracted into doc-values under specific conditions, otherwise they extract as BytesRef. */ public static ElementType toElementType(DataType dataType, MappedFieldType.FieldExtractPreference fieldExtractPreference) { - if (dataType == DataTypes.LONG - || dataType == DataTypes.DATETIME - || dataType == DataTypes.UNSIGNED_LONG - || dataType == DataTypes.COUNTER_LONG) { + if (dataType == DataType.LONG + || dataType == DataType.DATETIME + || dataType == DataType.UNSIGNED_LONG + || dataType == DataType.COUNTER_LONG) { return ElementType.LONG; } - if (dataType == DataTypes.INTEGER || dataType == DataTypes.COUNTER_INTEGER) { + if (dataType == DataType.INTEGER || dataType == DataType.COUNTER_INTEGER) { return ElementType.INT; } - if (dataType == DataTypes.DOUBLE || dataType == DataTypes.COUNTER_DOUBLE) { + if (dataType == DataType.DOUBLE || dataType == DataType.COUNTER_DOUBLE) { return ElementType.DOUBLE; } // unsupported fields are passed through as a BytesRef - if (dataType == DataTypes.KEYWORD - || dataType == DataTypes.TEXT - || dataType == DataTypes.IP - || dataType == DataTypes.SOURCE - || dataType == DataTypes.VERSION - || dataType == DataTypes.UNSUPPORTED) { + if (dataType == DataType.KEYWORD + || dataType == DataType.TEXT + || dataType == DataType.IP + || dataType == DataType.SOURCE + || dataType == DataType.VERSION + || dataType == DataType.UNSUPPORTED) { return ElementType.BYTES_REF; } - if (dataType == DataTypes.NULL) { + if (dataType == DataType.NULL) { return ElementType.NULL; } - if (dataType == DataTypes.BOOLEAN) { + if (dataType == DataType.BOOLEAN) { return ElementType.BOOLEAN; } - if (dataType == DataTypes.DOC_DATA_TYPE) { + if (dataType == DataType.DOC_DATA_TYPE) { return ElementType.DOC; } - if (dataType == DataTypes.TSID_DATA_TYPE) { + if (dataType == DataType.TSID_DATA_TYPE) { return ElementType.BYTES_REF; } if (EsqlDataTypes.isSpatialPoint(dataType)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 6059b61031d1e..4fdc0bdab5ade 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -56,6 +56,7 @@ import 
org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.index.IndexResolver; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; @@ -195,6 +196,8 @@ public List getNamedWriteables() { entries.addAll(EsField.getNamedWriteables()); entries.addAll(Attribute.getNamedWriteables()); entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project + entries.addAll(NamedExpression.getNamedWriteables()); + entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); return entries; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index 9b6d392b5c899..23d2126e39706 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -40,7 +40,7 @@ import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; public class SpatialRelatesQuery extends Query { private final String field; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java index 6108fbe7d0cae..f973983e47f39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java @@ -37,11 +37,11 @@ import java.util.TreeMap; import java.util.TreeSet; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.OBJECT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; public class EsqlIndexResolver { private final Client client; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index ce4bf5d6143a3..10116bb24e6b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.esql.core.index.MappingException; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; 
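[Reviewer aside, not patch content: the NamedExpression entries added to getNamedWriteables() above are what let attributes and aliases inside serialized plan fragments round-trip between nodes. A hedged sketch of that round-trip using stock org.elasticsearch.common.io.stream APIs; the helper name is illustrative, not from the patch:

    import java.io.IOException;

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.NamedWriteable;
    import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.io.stream.StreamInput;

    final class RoundTripSketch {
        // Write a NamedWriteable, then read it back through a registry built from the plugin's entries.
        static <T extends NamedWriteable> T roundTrip(T value, Class<T> clazz, NamedWriteableRegistry registry) throws IOException {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeNamedWriteable(value);
                try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
                    return in.readNamedWriteable(clazz);
                }
            }
        }
    }

End of aside; the diff continues.]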
-import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -291,7 +291,7 @@ private void assertSameMappings(String prefix, Map fromQl, Map< * we don't actually use it in ESQL and the EsqlIndexResolver doesn't * produce exactly the same result. */ - if (qlField.getDataType().equals(DataTypes.UNSUPPORTED) == false + if (qlField.getDataType().equals(DataType.UNSUPPORTED) == false && qlField.getName().equals(esqlField.getName()) == false // QL uses full paths for unsupported fields. ESQL does not. This particular difference is fine. ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 7ce2d012bf76b..cc2525799224b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.type.Converter; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; @@ -54,27 +53,27 @@ import java.util.function.Function; import static java.util.Map.entry; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.isPrimitive; +import static org.elasticsearch.xpack.esql.core.type.DataType.isString; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToUnsignedLong; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static 
org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isPrimitive; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isString; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.ONE_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; @@ -122,34 +121,34 @@ public static boolean canConvert(DataType from, DataType to) { public static Converter converterFor(DataType from, DataType to) { // TODO move EXPRESSION_TO_LONG here if there is no regression if (isString(from)) { - if (to == DataTypes.DATETIME) { + if (to == DataType.DATETIME) { return EsqlConverter.STRING_TO_DATETIME; } - if (to == DataTypes.IP) { + if (to == DataType.IP) { return EsqlConverter.STRING_TO_IP; } - if (to == DataTypes.VERSION) { + if (to == DataType.VERSION) { return EsqlConverter.STRING_TO_VERSION; } - if (to == DataTypes.DOUBLE) { + if (to == DataType.DOUBLE) { return EsqlConverter.STRING_TO_DOUBLE; } - if (to == DataTypes.LONG) { + if (to == DataType.LONG) { return EsqlConverter.STRING_TO_LONG; } - if (to == DataTypes.INTEGER) { + if (to == DataType.INTEGER) { return EsqlConverter.STRING_TO_INT; } - if (to == DataTypes.BOOLEAN) { + if (to == DataType.BOOLEAN) { return EsqlConverter.STRING_TO_BOOLEAN; } if (EsqlDataTypes.isSpatial(to)) { return EsqlConverter.STRING_TO_SPATIAL; } - if (to == DataTypes.TIME_DURATION) { + if (to == DataType.TIME_DURATION) { return EsqlConverter.STRING_TO_TIME_DURATION; } - if (to == DataTypes.DATE_PERIOD) { + if (to == DataType.DATE_PERIOD) { return EsqlConverter.STRING_TO_DATE_PERIOD; } } @@ -188,15 +187,15 @@ public static TemporalAmount parseTemporalAmount(Object val, DataType expectedTy if ((value.isEmpty() || qualifier.isEmpty()) == false) { try { TemporalAmount result = parseTemporalAmout(Integer.parseInt(value.toString()), qualifier.toString(), Source.EMPTY); - if (DataTypes.DATE_PERIOD == expectedType && result instanceof Period - || DataTypes.TIME_DURATION == expectedType && result instanceof Duration) { + if (DataType.DATE_PERIOD == expectedType && result instanceof Period + || DataType.TIME_DURATION == expectedType && result instanceof Duration) { return result; } - if (result instanceof Period && expectedType == DataTypes.TIME_DURATION) { - errorMessage += ", did you mean " + DataTypes.DATE_PERIOD + "?"; + if (result instanceof Period && expectedType == DataType.TIME_DURATION) { + errorMessage += ", did you mean " + DataType.DATE_PERIOD + "?"; } - if (result instanceof Duration && 
expectedType == DataTypes.DATE_PERIOD) { - errorMessage += ", did you mean " + DataTypes.TIME_DURATION + "?"; + if (result instanceof Duration && expectedType == DataType.DATE_PERIOD) { + errorMessage += ", did you mean " + DataType.TIME_DURATION + "?"; } } catch (NumberFormatException ex) { // wrong pattern @@ -431,8 +430,8 @@ public static long booleanToUnsignedLong(boolean number) { public enum EsqlConverter implements Converter { - STRING_TO_DATE_PERIOD(x -> EsqlDataTypeConverter.parseTemporalAmount(x, DataTypes.DATE_PERIOD)), - STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, DataTypes.TIME_DURATION)), + STRING_TO_DATE_PERIOD(x -> EsqlDataTypeConverter.parseTemporalAmount(x, DataType.DATE_PERIOD)), + STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, DataType.TIME_DURATION)), STRING_TO_CHRONO_FIELD(EsqlDataTypeConverter::stringToChrono), STRING_TO_DATETIME(x -> EsqlDataTypeConverter.dateTimeToLong((String) x)), STRING_TO_IP(x -> EsqlDataTypeConverter.stringToIP((String) x)), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java index 936aeab569e5f..dc680e5305842 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistry.java @@ -10,14 +10,13 @@ import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypeRegistry; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.Collection; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isDateTime; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isDateTimeOrTemporal; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isNullOrDatePeriod; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isNullOrTemporalAmount; @@ -31,7 +30,7 @@ private EsqlDataTypeRegistry() {} @Override public Collection dataTypes() { - return DataTypes.types(); + return DataType.types(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 893dcfe9103ac..e48b46758f36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -8,7 +8,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.Collections; import java.util.Locale; @@ -17,45 +16,45 @@ import static java.util.stream.Collectors.toMap; import static 
java.util.stream.Collectors.toUnmodifiableMap; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BYTE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.HALF_FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NESTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.NULL; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.OBJECT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SCALED_FLOAT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SHORT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.SOURCE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSUPPORTED; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isNull; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.BYTE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.HALF_FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.NESTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.SCALED_FLOAT; +import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; +import static org.elasticsearch.xpack.esql.core.type.DataType.SOURCE; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; +import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; public final class EsqlDataTypes { - private static final Map<String, DataType> NAME_TO_TYPE = DataTypes.types() + private static final Map<String, DataType> NAME_TO_TYPE = DataType.types() .stream() .collect(toUnmodifiableMap(DataType::typeName, t -> t)); private static final Map<String, DataType> ES_TO_TYPE; static { - Map<String, DataType> map = DataTypes.types().stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); + Map<String, DataType> map = DataType.types().stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); // ES calls this 'point', but ESQL calls it 'cartesian_point' - map.put("point", DataTypes.CARTESIAN_POINT); - map.put("shape", DataTypes.CARTESIAN_SHAPE); + map.put("point", DataType.CARTESIAN_POINT); + map.put("shape", DataType.CARTESIAN_SHAPE); ES_TO_TYPE
= Collections.unmodifiableMap(map); } private static final Map<String, DataType> NAME_OR_ALIAS_TO_TYPE; static { - Map<String, DataType> map = DataTypes.types().stream().collect(toMap(DataType::typeName, Function.identity())); + Map<String, DataType> map = DataType.types().stream().collect(toMap(DataType::typeName, Function.identity())); map.put("bool", BOOLEAN); map.put("int", INTEGER); map.put("string", KEYWORD); @@ -105,7 +104,7 @@ public static DataType fromJava(Object value) { } public static boolean isUnsupported(DataType type) { - return DataTypes.isUnsupported(type); + return DataType.isUnsupported(type); } public static String outputType(DataType type) { @@ -124,11 +123,11 @@ public static boolean isPrimitive(DataType t) { } public static boolean isDateTimeOrTemporal(DataType t) { - return DataTypes.isDateTime(t) || isTemporalAmount(t); + return DataType.isDateTime(t) || isTemporalAmount(t); } public static boolean isTemporalAmount(DataType t) { - return t == DataTypes.DATE_PERIOD || t == DataTypes.TIME_DURATION; + return t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION; } public static boolean isNullOrTemporalAmount(DataType t) { @@ -136,23 +135,23 @@ public static boolean isNullOrDatePeriod(DataType t) { - return t == DataTypes.DATE_PERIOD || isNull(t); + return t == DataType.DATE_PERIOD || isNull(t); } public static boolean isNullOrTimeDuration(DataType t) { - return t == DataTypes.TIME_DURATION || isNull(t); + return t == DataType.TIME_DURATION || isNull(t); } public static boolean isSpatial(DataType t) { - return t == DataTypes.GEO_POINT || t == DataTypes.CARTESIAN_POINT || t == DataTypes.GEO_SHAPE || t == DataTypes.CARTESIAN_SHAPE; + return t == DataType.GEO_POINT || t == DataType.CARTESIAN_POINT || t == DataType.GEO_SHAPE || t == DataType.CARTESIAN_SHAPE; } public static boolean isSpatialGeo(DataType t) { - return t == DataTypes.GEO_POINT || t == DataTypes.GEO_SHAPE; + return t == DataType.GEO_POINT || t == DataType.GEO_SHAPE; } public static boolean isSpatialPoint(DataType t) { - return t == DataTypes.GEO_POINT || t == DataTypes.CARTESIAN_POINT; + return t == DataType.GEO_POINT || t == DataType.CARTESIAN_POINT; } /** @@ -201,6 +200,6 @@ public static DataType getCounterType(String typeName) { } public static boolean isCounterType(DataType dt) { - return dt == DataTypes.COUNTER_LONG || dt == DataTypes.COUNTER_INTEGER || dt == DataTypes.COUNTER_DOUBLE; + return dt == DataType.COUNTER_LONG || dt == DataType.COUNTER_INTEGER || dt == DataType.COUNTER_DOUBLE; } } diff --git a/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt b/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt index 95e452533dae3..5371b35f4e033 100644 --- a/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt +++ b/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt @@ -2,5 +2,4 @@ org.elasticsearch.xpack.esql.core.plan.logical.Aggregate @ use @org.elasticsearc org.elasticsearch.xpack.esql.core.plan.logical.EsRelation @ use @org.elasticsearch.xpack.esql.plan.logical.EsRelation instead org.elasticsearch.xpack.esql.core.plan.logical.Project @ use @org.elasticsearch.xpack.esql.plan.logical.Project instead org.elasticsearch.xpack.esql.core.plan.logical.UnresolvedRelation @ use @org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation instead -org.elasticsearch.xpack.esql.core.analyzer.PreAnalyzer @ use @org.elasticsearch.xpack.esql.analysis.PreAnalyzer instead org.elasticsearch.xpack.esql.core.expression.function.Functions @ use
@org.elasticsearch.xpack.esql.expression.function.Functions instead diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 0ff0b2c9bd9b9..59cabcd459fba 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -403,7 +403,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { List columnTypes = coordinatorPlan.output() .stream() .peek(o -> dataTypes.add(EsqlDataTypes.outputType(o.dataType()))) - .map(o -> Type.asType(o.dataType().name())) + .map(o -> Type.asType(o.dataType().nameUpper())) .toList(); List drivers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 6ef33b7ae5eb8..a614ff3c621f8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -25,6 +25,7 @@ import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; @@ -118,6 +119,8 @@ public static NamedWriteableRegistry writableRegistry() { entries.addAll(EsField.getNamedWriteables()); entries.addAll(Attribute.getNamedWriteables()); entries.add(UnsupportedAttribute.ENTRY); + entries.addAll(NamedExpression.getNamedWriteables()); + entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); return new NamedWriteableRegistry(entries); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 317fabf0ac434..b78958152b170 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.core.TimeValue; @@ -32,7 +33,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.Column; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import java.io.IOException; @@ -160,41 +161,10 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testAnyVersionIsValid() throws IOException { - String validVersionString = randomAlphaOfLength(5); - - String json = String.format(Locale.ROOT, """ - { - "version": "%s", - "query": 
"ROW x = 1" - } - """, validVersionString); - - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - assertNull(request.validate()); - - request = parseEsqlQueryRequestAsync(json); - assertNull(request.validate()); - } - - public void testMissingVersionIsValid() throws IOException { - String missingVersion = randomBoolean() ? "" : ", \"version\": \"\""; - String json = String.format(Locale.ROOT, """ - { - "columnar": true, - "query": "row x = 1" - %s - }""", missingVersion); - - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - assertNull(request.validate()); - } - public void testMissingQueryIsNotValid() throws IOException { String json = """ { - "columnar": true, - "version": "snapshot" + "columnar": true }"""; EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); assertNotNull(request.validate()); @@ -227,7 +197,7 @@ public void testTablesKeyword() throws IOException { """; EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); Column c = request.tables().get("a").get("c"); - assertThat(c.type(), equalTo(DataTypes.KEYWORD)); + assertThat(c.type(), equalTo(DataType.KEYWORD)); try ( BytesRefBlock.Builder builder = new BlockFactory( new NoopCircuitBreaker(CircuitBreaker.REQUEST), @@ -259,7 +229,7 @@ public void testTablesInteger() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); Column c = request.tables().get("a").get("c"); - assertThat(c.type(), equalTo(DataTypes.INTEGER)); + assertThat(c.type(), equalTo(DataType.INTEGER)); try ( IntBlock.Builder builder = new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) .newIntBlockBuilder(10) @@ -287,7 +257,7 @@ public void testTablesLong() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); Column c = request.tables().get("a").get("c"); - assertThat(c.type(), equalTo(DataTypes.LONG)); + assertThat(c.type(), equalTo(DataType.LONG)); try ( LongBlock.Builder builder = new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) .newLongBlockBuilder(10) @@ -305,6 +275,34 @@ public void testTablesLong() throws IOException { assertTablesOnlyValidOnSnapshot(request); } + public void testTablesDouble() throws IOException { + String json = """ + { + "query": "ROW x = 1", + "tables": {"a": {"c:double": [1.1, 2, "3.1415", null, [5.1, "-6"]]}} + } + """; + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + Column c = request.tables().get("a").get("c"); + assertThat(c.type(), equalTo(DataType.DOUBLE)); + try ( + DoubleBlock.Builder builder = new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) + .newDoubleBlockBuilder(10) + ) { + builder.appendDouble(1.1); + builder.appendDouble(2); + builder.appendDouble(3.1415); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendDouble(5.1); + builder.appendDouble(-6); + builder.endPositionEntry(); + assertThat(c.values(), equalTo(builder.build())); + } + assertTablesOnlyValidOnSnapshot(request); + } + public void testManyTables() throws IOException { String json = """ { @@ -329,15 +327,15 @@ public void testManyTables() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); assertThat(request.tables().keySet(), hasSize(2)); Map t1 = request.tables().get("t1"); - assertThat(t1.get("a").type(), equalTo(DataTypes.LONG)); - 
assertThat(t1.get("b").type(), equalTo(DataTypes.LONG)); - assertThat(t1.get("c").type(), equalTo(DataTypes.KEYWORD)); - assertThat(t1.get("d").type(), equalTo(DataTypes.LONG)); + assertThat(t1.get("a").type(), equalTo(DataType.LONG)); + assertThat(t1.get("b").type(), equalTo(DataType.LONG)); + assertThat(t1.get("c").type(), equalTo(DataType.KEYWORD)); + assertThat(t1.get("d").type(), equalTo(DataType.LONG)); Map t2 = request.tables().get("t2"); - assertThat(t2.get("a").type(), equalTo(DataTypes.LONG)); - assertThat(t2.get("b").type(), equalTo(DataTypes.INTEGER)); - assertThat(t2.get("c").type(), equalTo(DataTypes.LONG)); - assertThat(t2.get("d").type(), equalTo(DataTypes.LONG)); + assertThat(t2.get("a").type(), equalTo(DataType.LONG)); + assertThat(t2.get("b").type(), equalTo(DataType.INTEGER)); + assertThat(t2.get("c").type(), equalTo(DataType.LONG)); + assertThat(t2.get("d").type(), equalTo(DataType.LONG)); assertTablesOnlyValidOnSnapshot(request); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index c1fec5d48d23d..9bc792ffe04aa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -47,7 +47,6 @@ import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.versionfield.Version; @@ -125,8 +124,8 @@ EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profil private ColumnInfo randomColumnInfo() { DataType type = randomValueOtherThanMany( - t -> false == DataTypes.isPrimitive(t) || t == DataTypes.DATE_PERIOD || t == DataTypes.TIME_DURATION, - () -> randomFrom(DataTypes.types()) + t -> false == DataType.isPrimitive(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION, + () -> randomFrom(DataType.types()) ); type = EsqlDataTypes.widenSmallNumericTypes(type); return new ColumnInfo(randomAlphaOfLength(10), type.esType()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 5f2b4290f48f3..975d8e1c7d7b8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.TypesTests; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -50,6 +49,7 @@ import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import 
org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; @@ -62,9 +62,12 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; @@ -78,8 +81,11 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.matchesRegex; +import static org.hamcrest.Matchers.startsWith; //@TestLogging(value = "org.elasticsearch.xpack.esql.analysis:TRACE", reason = "debug") public class AnalyzerTests extends ESTestCase { @@ -181,7 +187,7 @@ public void testRowAttributeResolution() { var plan = analyzer.analyze( new Eval( EMPTY, - new Row(EMPTY, List.of(new Alias(EMPTY, "emp_no", new Literal(EMPTY, 1, DataTypes.INTEGER)))), + new Row(EMPTY, List.of(new Alias(EMPTY, "emp_no", new Literal(EMPTY, 1, DataType.INTEGER)))), List.of(new Alias(EMPTY, "e", new UnresolvedAttribute(EMPTY, "emp_no"))) ) ); @@ -189,7 +195,7 @@ public void testRowAttributeResolution() { var limit = as(plan, Limit.class); var eval = as(limit.child(), Eval.class); assertEquals(1, eval.fields().size()); - assertEquals(new Alias(EMPTY, "e", new ReferenceAttribute(EMPTY, "emp_no", DataTypes.INTEGER)), eval.fields().get(0)); + assertEquals(new Alias(EMPTY, "e", new ReferenceAttribute(EMPTY, "emp_no", DataType.INTEGER)), eval.fields().get(0)); assertEquals(2, eval.output().size()); Attribute empNo = eval.output().get(0); @@ -232,7 +238,7 @@ public void testProjectBasicPattern() { assertProjectionTypes(""" from test | keep first*name - """, DataTypes.KEYWORD); + """, DataType.KEYWORD); } public void testProjectIncludePattern() { @@ -293,16 +299,16 @@ public void testNoProjection() { """ from test """, - DataTypes.KEYWORD, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.TEXT, - DataTypes.TEXT, - DataTypes.KEYWORD, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.INTEGER + DataType.KEYWORD, + DataType.INTEGER, + DataType.KEYWORD, + DataType.TEXT, + DataType.TEXT, + DataType.KEYWORD, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.INTEGER ); } @@ -522,7 +528,9 @@ public void testDropUnsupportedFieldExplicit() { "float", "foo_type", "int", + "ip", "keyword", + "long", "point", "shape", "some.ambiguous", @@ -566,7 +574,9 @@ public void testDropUnsupportedPattern() { "float", "foo_type", "int", + "ip", "keyword", + "long", "point", "shape", "some.ambiguous", @@ -779,7 +789,9 @@ public void testDropSupportedDottedField() { "float", "foo_type", "int", + "ip", "keyword", + "long", "point", "shape", "some.ambiguous", @@ -1654,10 +1666,10 @@ public void testCounterTypes() { attributes.keySet(), equalTo(Set.of("network.connections", "network.bytes_in", "network.bytes_out", "network.message_in")) ); - 
assertThat(attributes.get("network.connections").dataType(), equalTo(DataTypes.LONG)); - assertThat(attributes.get("network.bytes_in").dataType(), equalTo(DataTypes.COUNTER_LONG)); - assertThat(attributes.get("network.bytes_out").dataType(), equalTo(DataTypes.COUNTER_LONG)); - assertThat(attributes.get("network.message_in").dataType(), equalTo(DataTypes.COUNTER_DOUBLE)); + assertThat(attributes.get("network.connections").dataType(), equalTo(DataType.LONG)); + assertThat(attributes.get("network.bytes_in").dataType(), equalTo(DataType.COUNTER_LONG)); + assertThat(attributes.get("network.bytes_out").dataType(), equalTo(DataType.COUNTER_LONG)); + assertThat(attributes.get("network.message_in").dataType(), equalTo(DataType.COUNTER_DOUBLE)); } public void testMissingAttributeException_InChainedEval() { @@ -1878,17 +1890,182 @@ public void testInOnText() { """, "mapping-multi-field-variation.json", "text"); } + public void testMvAppendValidation() { + String[][] fields = { + { "bool", "boolean" }, + { "int", "integer" }, + { "unsigned_long", "unsigned_long" }, + { "float", "double" }, + { "text", "text" }, + { "keyword", "keyword" }, + { "date", "datetime" }, + { "point", "geo_point" }, + { "shape", "geo_shape" }, + { "long", "long" }, + { "ip", "ip" }, + { "version", "version" } }; + + Supplier supplier = () -> randomInt(fields.length - 1); + int first = supplier.get(); + int second = randomValueOtherThan(first, supplier); + + String signature = "mv_append(" + fields[first][0] + ", " + fields[second][0] + ")"; + verifyUnsupported( + " from test | eval " + signature, + "second argument of [" + + signature + + "] must be [" + + fields[first][1] + + "], found value [" + + fields[second][0] + + "] type [" + + fields[second][1] + + "]" + ); + } + public void testLookup() { - var e = expectThrows(ParsingException.class, () -> analyze(""" + String query = """ FROM test | RENAME languages AS int | LOOKUP int_number_names ON int + """; + if (Build.current().isProductionRelease()) { + var e = expectThrows(VerificationException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + LogicalPlan plan = analyze(query); + var limit = as(plan, Limit.class); + assertThat(limit.limit().fold(), equalTo(1000)); + + var lookup = as(limit.child(), Lookup.class); + assertThat(lookup.tableName().fold(), equalTo("int_number_names")); + assertMap(lookup.matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); + assertThat( + lookup.localRelation().output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("int{f}")).item(startsWith("name{f}")) + ); + + var project = as(lookup.child(), EsqlProject.class); + assertThat(project.projections().stream().map(Object::toString).toList(), hasItem(matchesRegex("languages\\{f}#\\d+ AS int#\\d+"))); + + var esRelation = as(project.child(), EsRelation.class); + assertThat(esRelation.index().name(), equalTo("test")); + + // Lookup's output looks sensible too + assertMap( + lookup.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + // TODO prune unused columns down through the join + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + /* + * Int is a reference here because we renamed it in project. + * If we hadn't it'd be a field and that'd be fine. 
+ */ + .item(containsString("int{r}")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + /* + * It's important that name is returned as a *reference* here + * instead of a field. If it were a field we'd use SearchStats + * on it and discover that it doesn't exist in the index. It doesn't! + * We don't expect it to. It exists only in the lookup table. + */ + .item(containsString("name{r}")) + ); + } + + public void testLookupMissingField() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + FROM test + | LOOKUP int_number_names ON garbage """)); if (Build.current().isProductionRelease()) { assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); return; } - assertThat(e.getMessage(), containsString("LOOKUP not yet supported")); + assertThat(e.getMessage(), containsString("Unknown column in lookup target [garbage]")); + } + + public void testLookupMissingTable() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + FROM test + | LOOKUP garbage ON a + """)); + if (Build.current().isProductionRelease()) { + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + assertThat(e.getMessage(), containsString("Unknown table [garbage]")); + } + + public void testLookupMatchTypeWrong() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + FROM test + | RENAME last_name AS int + | LOOKUP int_number_names ON int + """)); + if (Build.current().isProductionRelease()) { + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + assertThat(e.getMessage(), containsString("column type mismatch, table column was [integer] and original column was [keyword]")); + } + + public void testImplicitCasting() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = concat("2024", "-04", "-01") + 1 day + """)); + + assertThat( + e.getMessage(), + containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + 1 day] must be [datetime or numeric]") + ); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = to_string(null) - 1 day + """)); + + assertThat(e.getMessage(), containsString("first argument of [to_string(null) - 1 day] must be [datetime or numeric]")); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = concat("2024", "-04", "-01") + "1 day" + """)); + + assertThat( + e.getMessage(), + containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + \"1 day\"] must be [datetime or numeric]") + ); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = 1 year - "2024-01-01" + 1 day + """)); + + assertThat( + e.getMessage(), + containsString( + "arguments are in unsupported order: cannot subtract a [DATETIME] value [\"2024-01-01\"] " + + "from a [DATE_PERIOD] amount [1 year]" + ) + ); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = "2024-01-01" - 1 day - "2023-12-31" + """)); + + assertThat(e.getMessage(), containsString("[-] has arguments with incompatible types [datetime] and [datetime]")); + + e = expectThrows(VerificationException.class, () -> analyze(""" + from test | eval x = "2024-01-01" - 1 day + "2023-12-31" + """)); + + assertThat(e.getMessage(), containsString("[+] has arguments 
with incompatible types [datetime] and [datetime]")); } private void verifyUnsupported(String query, String errorMessage) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 4c3a415c5f706..07a364bf9196e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -20,7 +19,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.matchesRegex; @@ -308,7 +307,7 @@ public void testMixedNumericalNonConvertibleTypesInIn() { } public void testUnsignedLongTypeMixInComparisons() { - List types = DataTypes.types() + List types = DataType.types() .stream() .filter(dt -> dt.isNumeric() && EsqlDataTypes.isRepresentable(dt) && dt != UNSIGNED_LONG) .map(DataType::typeName) @@ -346,7 +345,7 @@ public void testUnsignedLongTypeMixInComparisons() { } public void testUnsignedLongTypeMixInArithmetics() { - List types = DataTypes.types() + List types = DataType.types() .stream() .filter(dt -> dt.isNumeric() && EsqlDataTypes.isRepresentable(dt) && dt != UNSIGNED_LONG) .map(DataType::typeName) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index dead11069d2e7..107c2af11c4f1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -46,7 +46,7 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java new file mode 100644 index 0000000000000..ce7aa789f89b1 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
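Reviewer aside: the bulk of this patch is one mechanical rename threaded through every test file: constants and helpers that lived on a separate `DataTypes` holder now sit on the `DataType` enum itself. A minimal sketch of the resulting shape, assuming a pared-down enum (the real one lives in esql-core and has far more members):

```java
import java.util.List;

public class DataTypeRenameDemo {
    // Stand-in enum: the constants and the types() helper now live together.
    enum DataType {
        INTEGER, LONG, DOUBLE, KEYWORD;

        static List<DataType> types() { // call sites used to say DataTypes.types()
            return List.of(values());
        }
    }

    public static void main(String[] args) {
        // Before: DataTypes.INTEGER, DataTypes.types()
        // After:  DataType.INTEGER, DataType.types()
        System.out.println(DataType.types().contains(DataType.INTEGER)); // true
    }
}
```

Each call site swaps the class name and drops one import, which is why the hunks are so repetitive.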
+ */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.test.AbstractWireTestCase; +import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.NameId; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.tree.SourceTests; +import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; + +public class AliasTests extends AbstractWireTestCase { + @Override + protected Alias createTestInstance() { + Source source = SourceTests.randomSource(); + String name = randomAlphaOfLength(5); + String qualifier = randomBoolean() ? null : randomAlphaOfLength(3); + // TODO better randomChild + Expression child = ReferenceAttributeTests.randomReferenceAttribute(); + boolean synthetic = randomBoolean(); + return new Alias(source, name, qualifier, child, new NameId(), synthetic); + } + + @Override + protected Alias mutateInstance(Alias instance) throws IOException { + Source source = instance.source(); + String name = instance.name(); + String qualifier = instance.qualifier(); + Expression child = instance.child(); + boolean synthetic = instance.synthetic(); + switch (between(0, 3)) { + case 0 -> name = randomAlphaOfLength(name.length() + 1); + case 1 -> qualifier = randomValueOtherThan(qualifier, () -> randomBoolean() ? 
null : randomAlphaOfLength(3)); + case 2 -> child = randomValueOtherThan(child, ReferenceAttributeTests::randomReferenceAttribute); + case 3 -> synthetic = false == synthetic; + } + return new Alias(source, name, qualifier, child, instance.id(), synthetic); + } + + @Override + protected Alias copyInstance(Alias instance, TransportVersion version) throws IOException { + return copyInstance( + instance, + getNamedWriteableRegistry(), + (out, v) -> new PlanStreamOutput(out, new PlanNameRegistry(), null).writeNamedWriteable(v), + in -> { + PlanStreamInput pin = new PlanStreamInput(in, new PlanNameRegistry(), in.namedWriteableRegistry(), null); + Alias deser = (Alias) pin.readNamedWriteable(NamedExpression.class); + assertThat(deser.id(), equalTo(pin.mapNameId(Long.parseLong(instance.id().toString())))); + return deser; + }, + version + ); + } + + @Override + protected final NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(NamedExpression.getNamedWriteables()); + entries.addAll(Attribute.getNamedWriteables()); + entries.add(UnsupportedAttribute.ENTRY); + entries.addAll(EsField.getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java index cbac81700a81a..692ad6f3dcb27 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -167,7 +166,7 @@ public void testNot() throws Exception { } public void testLiteralHashSorting() throws Exception { - DataType type = randomFrom(DataTypes.types()); + DataType type = randomFrom(DataType.types()); List list = randomList(10, 1024, () -> new Literal(EMPTY, randomInt(), type)); List shuffle = new ArrayList<>(list); Collections.shuffle(shuffle, random()); @@ -183,9 +182,9 @@ public void testLiteralHashSorting() throws Exception { public void testInManual() throws Exception { FieldAttribute value = fieldAttribute(); - Literal a = new Literal(EMPTY, 1, DataTypes.INTEGER); - Literal b = new Literal(EMPTY, 2, DataTypes.INTEGER); - Literal c = new Literal(EMPTY, 3, DataTypes.INTEGER); + Literal a = new Literal(EMPTY, 1, DataType.INTEGER); + Literal b = new Literal(EMPTY, 2, DataType.INTEGER); + Literal c = new Literal(EMPTY, 3, DataType.INTEGER); In in = new In(EMPTY, value, asList(a, b, c)); In anotherIn = new In(EMPTY, value, asList(b, a, c)); @@ -196,7 +195,7 @@ public void testInManual() throws Exception { public void testIn() throws Exception { FieldAttribute value = fieldAttribute(); - List list = randomList(randomInt(1024), () -> new Literal(EMPTY, randomInt(), DataTypes.INTEGER)); + List list = randomList(randomInt(1024), () -> new Literal(EMPTY, randomInt(), DataType.INTEGER)); In in = new In(EMPTY, value, list); List shuffledList = new ArrayList<>(list); 
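Reviewer aside: `AliasTests.mutateInstance` above follows the standard wire-test idiom of flipping exactly one component so the mutated copy must compare unequal to the original. A self-contained sketch of just that idiom, with a record standing in for the real `Alias` (the actual test additionally round-trips instances through `PlanStreamOutput`/`PlanStreamInput`):

```java
import java.util.Random;

public class MutateDemo {
    // Stand-in for Alias: name, optional qualifier, synthetic flag.
    record Alias(String name, String qualifier, boolean synthetic) {}

    static final Random RANDOM = new Random();

    static Alias mutate(Alias a) {
        // Change exactly one field per call, mirroring the switch in mutateInstance.
        return switch (RANDOM.nextInt(3)) {
            case 0 -> new Alias(a.name() + "x", a.qualifier(), a.synthetic());
            case 1 -> new Alias(a.name(), a.qualifier() == null ? "q" : null, a.synthetic());
            default -> new Alias(a.name(), a.qualifier(), false == a.synthetic());
        };
    }

    public static void main(String[] args) {
        Alias original = new Alias("e", null, false);
        // equals() must flag every single-field mutation, or the wire test
        // could not detect a lossy serialization round-trip.
        System.out.println(original.equals(mutate(original))); // false
    }
}
```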
Collections.shuffle(shuffledList, random());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 3b7d36015dcbc..20c2b6df9710a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -49,7 +49,6 @@
 import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.core.type.DataTypes;
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.core.util.NumericUtils;
 import org.elasticsearch.xpack.esql.core.util.StringUtils;
@@ -291,7 +290,7 @@ public final void testEvaluate() {
     private Object toJavaObjectUnsignedLongAware(Block block, int position) {
         Object result;
         result = toJavaObject(block, position);
-        if (result != null && testCase.expectedType() == DataTypes.UNSIGNED_LONG) {
+        if (result != null && testCase.expectedType() == DataType.UNSIGNED_LONG) {
             assertThat(result, instanceOf(Long.class));
             result = NumericUtils.unsignedLongAsBigInteger((Long) result);
         }
@@ -547,7 +546,7 @@ public final void testFold() {
         if (testCase.foldingExceptionClass() == null) {
             Object result = nullOptimized.fold();
             // Decode unsigned longs into BigIntegers
-            if (testCase.expectedType() == DataTypes.UNSIGNED_LONG && result != null) {
+            if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) {
                 result = NumericUtils.unsignedLongAsBigInteger((Long) result);
             }
             assertThat(result, testCase.getMatcher());
@@ -628,8 +627,8 @@ protected static List<TestCaseSupplier> anyNullIsNull(boolean entirelyNullPreser
         return anyNullIsNull(
             testCaseSuppliers,
             (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false
-                && nullValueDataType == DataTypes.NULL
-                && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(),
+                && nullValueDataType == DataType.NULL
+                && original.getData().size() == 1 ? DataType.NULL : original.expectedType(),
             (nullPosition, nullData, original) -> original
         );
     }
@@ -686,7 +685,7 @@ protected static List<TestCaseSupplier> anyNullIsNull(
             if (firstTimeSeenSignature) {
                 List<DataType> typesWithNull = IntStream.range(0, original.types().size())
-                    .mapToObj(i -> i == finalNullPosition ? DataTypes.NULL : original.types().get(i))
+                    .mapToObj(i -> i == finalNullPosition ? DataType.NULL : original.types().get(i))
                     .toList();
                 boolean newSignature = uniqueSignatures.add(typesWithNull);
                 if (newSignature) {
@@ -698,7 +697,7 @@ protected static List<TestCaseSupplier> anyNullIsNull(
                 return new TestCaseSupplier.TestCase(
                     data,
                     equalTo("LiteralsEvaluator[lit=null]"),
-                    expectedType.expectedType(finalNullPosition, DataTypes.NULL, oc),
+                    expectedType.expectedType(finalNullPosition, DataType.NULL, oc),
                     nullValue(),
                     null,
                     oc.getExpectedTypeError(),
@@ -745,7 +744,7 @@ protected static List<TestCaseSupplier> errorsForCasesWithoutExamples(
              * the full combinatorial explosions of all nulls - just a single null.
              * Hopefully <null>, <null> cases will function the same as <null>, <valid>
             * cases.
- */.filter(types -> types.stream().filter(t -> t == DataTypes.NULL).count() <= 1) + */.filter(types -> types.stream().filter(t -> t == DataType.NULL).count() <= 1) .map(types -> typeErrorSupplier(validPerPosition.size() != 1, validPerPosition, types, typeErrorMessageSupplier)) .forEach(suppliers::add); return suppliers; @@ -760,13 +759,13 @@ public static String errorMessageStringForBinaryOperators( return typeErrorMessage(includeOrdinal, validPerPosition, types); } catch (IllegalStateException e) { // This means all the positional args were okay, so the expected error is from the combination - if (types.get(0).equals(DataTypes.UNSIGNED_LONG)) { + if (types.get(0).equals(DataType.UNSIGNED_LONG)) { return "first argument of [] is [unsigned_long] and second is [" + types.get(1).typeName() + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; } - if (types.get(1).equals(DataTypes.UNSIGNED_LONG)) { + if (types.get(1).equals(DataType.UNSIGNED_LONG)) { return "first argument of [] is [" + types.get(0).typeName() + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; @@ -908,186 +907,186 @@ protected static String typeErrorMessage(boolean includeOrdinal, List, String> NAMED_EXPECTED_TYPES = Map.ofEntries( Map.entry( - Set.of(DataTypes.DATE_PERIOD, DataTypes.DOUBLE, DataTypes.INTEGER, DataTypes.LONG, DataTypes.TIME_DURATION, DataTypes.NULL), + Set.of(DataType.DATE_PERIOD, DataType.DOUBLE, DataType.INTEGER, DataType.LONG, DataType.TIME_DURATION, DataType.NULL), "numeric, date_period or time_duration" ), - Map.entry(Set.of(DataTypes.DATETIME, DataTypes.NULL), "datetime"), - Map.entry(Set.of(DataTypes.DOUBLE, DataTypes.NULL), "double"), - Map.entry(Set.of(DataTypes.INTEGER, DataTypes.NULL), "integer"), - Map.entry(Set.of(DataTypes.IP, DataTypes.NULL), "ip"), - Map.entry(Set.of(DataTypes.LONG, DataTypes.INTEGER, DataTypes.UNSIGNED_LONG, DataTypes.DOUBLE, DataTypes.NULL), "numeric"), - Map.entry(Set.of(DataTypes.LONG, DataTypes.INTEGER, DataTypes.UNSIGNED_LONG, DataTypes.DOUBLE), "numeric"), - Map.entry(Set.of(DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.VERSION, DataTypes.NULL), "string or version"), - Map.entry(Set.of(DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), "string"), - Map.entry(Set.of(DataTypes.IP, DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), "ip or string"), + Map.entry(Set.of(DataType.DATETIME, DataType.NULL), "datetime"), + Map.entry(Set.of(DataType.DOUBLE, DataType.NULL), "double"), + Map.entry(Set.of(DataType.INTEGER, DataType.NULL), "integer"), + Map.entry(Set.of(DataType.IP, DataType.NULL), "ip"), + Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE, DataType.NULL), "numeric"), + Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE), "numeric"), + Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.VERSION, DataType.NULL), "string or version"), + Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.NULL), "string"), + Map.entry(Set.of(DataType.IP, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "ip or string"), Map.entry(Set.copyOf(Arrays.asList(representableTypes())), "representable"), Map.entry(Set.copyOf(Arrays.asList(representableNonSpatialTypes())), "representableNonSpatial"), Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + 
DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or numeric or string" ), Map.entry( Set.of( - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "datetime or numeric or string" ), // What Add accepts Map.entry( Set.of( - DataTypes.DATE_PERIOD, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.LONG, - DataTypes.NULL, - DataTypes.TIME_DURATION, - DataTypes.UNSIGNED_LONG + DataType.DATE_PERIOD, + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.LONG, + DataType.NULL, + DataType.TIME_DURATION, + DataType.UNSIGNED_LONG ), "datetime or numeric" ), Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or datetime or numeric or string" ), // to_int Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.COUNTER_INTEGER, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + DataType.COUNTER_INTEGER, + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or counter_integer or datetime or numeric or string" ), // to_long Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.COUNTER_INTEGER, - DataTypes.COUNTER_LONG, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + DataType.COUNTER_INTEGER, + DataType.COUNTER_LONG, + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or counter_integer or counter_long or datetime or numeric or string" ), // to_double Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.COUNTER_DOUBLE, - DataTypes.COUNTER_INTEGER, - DataTypes.COUNTER_LONG, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + DataType.COUNTER_DOUBLE, + DataType.COUNTER_INTEGER, + DataType.COUNTER_LONG, + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or counter_double or counter_integer or counter_long or datetime or numeric or string" ), Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.CARTESIAN_POINT, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.GEO_POINT, - DataTypes.INTEGER, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.NULL + DataType.BOOLEAN, + DataType.CARTESIAN_POINT, + DataType.DATETIME, + DataType.DOUBLE, + DataType.GEO_POINT, + DataType.INTEGER, + DataType.KEYWORD, + 
DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.NULL ), "boolean or cartesian_point or datetime or geo_point or numeric or string" ), Map.entry( Set.of( - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.INTEGER, - DataTypes.IP, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.VERSION, - DataTypes.NULL + DataType.DATETIME, + DataType.DOUBLE, + DataType.INTEGER, + DataType.IP, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.VERSION, + DataType.NULL ), "datetime, double, integer, ip, keyword, long, text, unsigned_long or version" ), Map.entry( Set.of( - DataTypes.BOOLEAN, - DataTypes.DATETIME, - DataTypes.DOUBLE, - DataTypes.GEO_POINT, - DataTypes.GEO_SHAPE, - DataTypes.INTEGER, - DataTypes.IP, - DataTypes.KEYWORD, - DataTypes.LONG, - DataTypes.TEXT, - DataTypes.UNSIGNED_LONG, - DataTypes.VERSION, - DataTypes.NULL + DataType.BOOLEAN, + DataType.DATETIME, + DataType.DOUBLE, + DataType.GEO_POINT, + DataType.GEO_SHAPE, + DataType.INTEGER, + DataType.IP, + DataType.KEYWORD, + DataType.LONG, + DataType.TEXT, + DataType.UNSIGNED_LONG, + DataType.VERSION, + DataType.NULL ), "cartesian_point or datetime or geo_point or numeric or string" ), - Map.entry(Set.of(DataTypes.GEO_POINT, DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), "geo_point or string"), - Map.entry(Set.of(DataTypes.CARTESIAN_POINT, DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), "cartesian_point or string"), + Map.entry(Set.of(DataType.GEO_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "geo_point or string"), + Map.entry(Set.of(DataType.CARTESIAN_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "cartesian_point or string"), Map.entry( - Set.of(DataTypes.GEO_POINT, DataTypes.GEO_SHAPE, DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), + Set.of(DataType.GEO_POINT, DataType.GEO_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "geo_point or geo_shape or string" ), Map.entry( - Set.of(DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE, DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.NULL), + Set.of(DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "cartesian_point or cartesian_shape or string" ), - Map.entry(Set.of(DataTypes.GEO_POINT, DataTypes.CARTESIAN_POINT, DataTypes.NULL), "geo_point or cartesian_point"), - Map.entry(Set.of(DataTypes.DATE_PERIOD, DataTypes.TIME_DURATION, DataTypes.NULL), "dateperiod or timeduration") + Map.entry(Set.of(DataType.GEO_POINT, DataType.CARTESIAN_POINT, DataType.NULL), "geo_point or cartesian_point"), + Map.entry(Set.of(DataType.DATE_PERIOD, DataType.TIME_DURATION, DataType.NULL), "dateperiod or timeduration") ); // TODO: generate this message dynamically, a la AbstractConvertFunction#supportedTypesNames()? 
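Reviewer aside: `NAMED_EXPECTED_TYPES` above maps each exact set of accepted argument types to the human-readable phrase the verifier's error message uses, and the lookup relies on set equality, which is why every combination needs its own entry (hence the TODO about generating the message dynamically). A minimal sketch of the mechanism, with a two-entry map standing in for the real table:

```java
import java.util.Map;
import java.util.Set;

public class ExpectedTypeDemo {
    enum DataType { INTEGER, LONG, UNSIGNED_LONG, DOUBLE, NULL }

    static final Map<Set<DataType>, String> NAMED_EXPECTED_TYPES = Map.of(
        Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE, DataType.NULL), "numeric",
        Set.of(DataType.DOUBLE, DataType.NULL), "double"
    );

    public static void main(String[] args) {
        // Set.of is order-insensitive, so lookup works for any listing order,
        // but it must match the accepted-type set exactly.
        Set<DataType> valid = Set.of(DataType.NULL, DataType.DOUBLE);
        System.out.println(NAMED_EXPECTED_TYPES.get(valid)); // double
    }
}
```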
@@ -1107,7 +1106,7 @@ private static String expectedType(Set validTypes) { } protected static Stream representable() { - return DataTypes.types().stream().filter(EsqlDataTypes::isRepresentable); + return DataType.types().stream().filter(EsqlDataTypes::isRepresentable); } protected static DataType[] representableTypes() { @@ -1185,7 +1184,7 @@ private static Map, DataType> signatures() { if (tc.getExpectedTypeError() != null) { continue; } - if (tc.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { + if (tc.getData().stream().anyMatch(t -> t.type() == DataType.NULL)) { continue; } signatures.putIfAbsent(tc.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), tc.expectedType()); @@ -1602,6 +1601,6 @@ static Version randomVersion() { * All string types (keyword, text, match_only_text, etc). */ protected static DataType[] strings() { - return DataTypes.types().stream().filter(DataTypes::isString).toArray(DataType[]::new); + return DataType.types().stream().filter(DataType::isString).toArray(DataType[]::new); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java index ee542232aa30b..7ff87a682a789 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FieldAttributeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.AbstractEsFieldTypeTests; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; public class FieldAttributeTests extends AbstractAttributeTestCase { @@ -21,7 +20,7 @@ static FieldAttribute createFieldAttribute(int maxDepth) { Source source = Source.EMPTY; FieldAttribute parent = maxDepth == 0 || randomBoolean() ? null : createFieldAttribute(maxDepth - 1); String name = randomAlphaOfLength(5); - DataType type = randomFrom(DataTypes.types()); + DataType type = randomFrom(DataType.types()); EsField field = AbstractEsFieldTypeTests.randomAnyEsField(maxDepth); String qualifier = randomBoolean() ? null : randomAlphaOfLength(3); Nullability nullability = randomFrom(Nullability.values()); @@ -47,7 +46,7 @@ protected FieldAttribute mutate(FieldAttribute instance) { switch (between(0, 6)) { case 0 -> parent = randomValueOtherThan(parent, () -> randomBoolean() ? null : createFieldAttribute(2)); case 1 -> name = randomAlphaOfLength(name.length() + 1); - case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataTypes.types())); + case 2 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 3 -> field = randomValueOtherThan(field, () -> AbstractEsFieldTypeTests.randomAnyEsField(3)); case 4 -> qualifier = randomValueOtherThan(qualifier, () -> randomBoolean() ? 
null : randomAlphaOfLength(3)); case 5 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MetadataAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MetadataAttributeTests.java index 16a83b42d10ab..573af9c17bb1d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MetadataAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MetadataAttributeTests.java @@ -12,14 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; public class MetadataAttributeTests extends AbstractAttributeTestCase { @Override protected MetadataAttribute create() { Source source = Source.EMPTY; String name = randomAlphaOfLength(5); - DataType type = randomFrom(DataTypes.types()); + DataType type = randomFrom(DataType.types()); String qualifier = randomBoolean() ? null : randomAlphaOfLength(3); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); @@ -38,7 +37,7 @@ protected MetadataAttribute mutate(MetadataAttribute instance) { boolean searchable = instance.searchable(); switch (between(0, 5)) { case 0 -> name = randomAlphaOfLength(name.length() + 1); - case 1 -> type = randomValueOtherThan(type, () -> randomFrom(DataTypes.types())); + case 1 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 2 -> qualifier = randomValueOtherThan(qualifier, () -> randomBoolean() ? null : randomAlphaOfLength(3)); case 3 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 4 -> synthetic = false == synthetic; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ReferenceAttributeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ReferenceAttributeTests.java index e248b741ff48d..31d1018bacc91 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ReferenceAttributeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ReferenceAttributeTests.java @@ -12,20 +12,23 @@ import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; public class ReferenceAttributeTests extends AbstractAttributeTestCase { - @Override - protected ReferenceAttribute create() { + public static ReferenceAttribute randomReferenceAttribute() { Source source = Source.EMPTY; String name = randomAlphaOfLength(5); - DataType type = randomFrom(DataTypes.types()); + DataType type = randomFrom(DataType.types()); String qualifier = randomBoolean() ? 
null : randomAlphaOfLength(3); Nullability nullability = randomFrom(Nullability.values()); boolean synthetic = randomBoolean(); return new ReferenceAttribute(source, name, type, qualifier, nullability, new NameId(), synthetic); } + @Override + protected ReferenceAttribute create() { + return randomReferenceAttribute(); + } + @Override protected ReferenceAttribute mutate(ReferenceAttribute instance) { Source source = instance.source(); @@ -36,7 +39,7 @@ protected ReferenceAttribute mutate(ReferenceAttribute instance) { boolean synthetic = instance.synthetic(); switch (between(0, 4)) { case 0 -> name = randomAlphaOfLength(name.length() + 1); - case 1 -> type = randomValueOtherThan(type, () -> randomFrom(DataTypes.types())); + case 1 -> type = randomValueOtherThan(type, () -> randomFrom(DataType.types())); case 2 -> qualifier = randomValueOtherThan(qualifier, () -> randomBoolean() ? null : randomAlphaOfLength(3)); case 3 -> nullability = randomValueOtherThan(nullability, () -> randomFrom(Nullability.values())); case 4 -> synthetic = false == synthetic; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 9b86482a1ec88..54c4f2ae07eca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -124,7 +123,7 @@ public String toString() { /** * Generate positive test cases for unary functions that operate on an {@code numeric} - * fields by casting them to {@link DataTypes#DOUBLE}s. + * fields by casting them to {@link DataType#DOUBLE}s. 
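Reviewer aside: `forUnaryCastingToDouble` below asserts evaluator `toString()` output, so the expected string has to be composed the same way the evaluators nest. The wrapper names in this sketch are copied from `castToDoubleEvaluator` later in this patch; the pared-down enum and the standalone method are illustrative only:

```java
public class CastNameDemo {
    enum DataType { INTEGER, LONG, UNSIGNED_LONG, DOUBLE }

    // Mirrors castToDoubleEvaluator: wrap the read expression unless the
    // input is already a double.
    static String castToDoubleEvaluator(String original, DataType current) {
        return switch (current) {
            case DOUBLE -> original;
            case INTEGER -> "CastIntToDoubleEvaluator[v=" + original + "]";
            case LONG -> "CastLongToDoubleEvaluator[v=" + original + "]";
            case UNSIGNED_LONG -> "CastUnsignedLongToDoubleEvaluator[v=" + original + "]";
        };
    }

    public static void main(String[] args) {
        System.out.println(castToDoubleEvaluator("Attribute[channel=0]", DataType.INTEGER));
        // -> CastIntToDoubleEvaluator[v=Attribute[channel=0]]
    }
}
```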
*/ public static List forUnaryCastingToDouble( String name, @@ -139,8 +138,8 @@ public static List forUnaryCastingToDouble( List suppliers = new ArrayList<>(); forUnaryInt( suppliers, - eval + castToDoubleEvaluator(read, DataTypes.INTEGER) + "]", - DataTypes.DOUBLE, + eval + castToDoubleEvaluator(read, DataType.INTEGER) + "]", + DataType.DOUBLE, i -> expected.apply(Double.valueOf(i)), min.intValue(), max.intValue(), @@ -148,8 +147,8 @@ public static List forUnaryCastingToDouble( ); forUnaryLong( suppliers, - eval + castToDoubleEvaluator(read, DataTypes.LONG) + "]", - DataTypes.DOUBLE, + eval + castToDoubleEvaluator(read, DataType.LONG) + "]", + DataType.DOUBLE, i -> expected.apply(Double.valueOf(i)), min.longValue(), max.longValue(), @@ -157,20 +156,20 @@ public static List forUnaryCastingToDouble( ); forUnaryUnsignedLong( suppliers, - eval + castToDoubleEvaluator(read, DataTypes.UNSIGNED_LONG) + "]", - DataTypes.DOUBLE, + eval + castToDoubleEvaluator(read, DataType.UNSIGNED_LONG) + "]", + DataType.DOUBLE, ul -> expected.apply(ul.doubleValue()), BigInteger.valueOf((int) Math.ceil(min)), BigInteger.valueOf((int) Math.floor(max)), warnings ); - forUnaryDouble(suppliers, eval + read + "]", DataTypes.DOUBLE, expected::apply, min, max, warnings); + forUnaryDouble(suppliers, eval + read + "]", DataType.DOUBLE, expected::apply, min, max, warnings); return suppliers; } /** * Generate positive test cases for binary functions that operate on an {@code numeric} - * fields by casting them to {@link DataTypes#DOUBLE}s. + * fields by casting them to {@link DataType#DOUBLE}s. */ public static List forBinaryCastingToDouble( String name, @@ -216,7 +215,7 @@ public static List forBinaryCastingToDouble( ), (lhs, rhs) -> warnings, suppliers, - DataTypes.DOUBLE, + DataType.DOUBLE, false ); return suppliers; @@ -294,13 +293,13 @@ public record NumericTypeTestConfigs( NumericTypeTestConfig doubleStuff ) { public NumericTypeTestConfig get(DataType type) { - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { return intStuff; } - if (type == DataTypes.LONG) { + if (type == DataType.LONG) { return longStuff; } - if (type == DataTypes.DOUBLE) { + if (type == DataType.DOUBLE) { return doubleStuff; } throw new IllegalArgumentException("bogus numeric type [" + type + "]"); @@ -311,30 +310,30 @@ public static DataType widen(DataType lhs, DataType rhs) { if (lhs == rhs) { return lhs; } - if (lhs == DataTypes.DOUBLE || rhs == DataTypes.DOUBLE) { - return DataTypes.DOUBLE; + if (lhs == DataType.DOUBLE || rhs == DataType.DOUBLE) { + return DataType.DOUBLE; } - if (lhs == DataTypes.LONG || rhs == DataTypes.LONG) { - return DataTypes.LONG; + if (lhs == DataType.LONG || rhs == DataType.LONG) { + return DataType.LONG; } throw new IllegalArgumentException("Invalid numeric widening lhs: [" + lhs + "] rhs: [" + rhs + "]"); } public static List getSuppliersForNumericType(DataType type, Number min, Number max, boolean includeZero) { - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { return intCases(NumericUtils.saturatingIntValue(min), NumericUtils.saturatingIntValue(max), includeZero); } - if (type == DataTypes.LONG) { + if (type == DataType.LONG) { return longCases(min.longValue(), max.longValue(), includeZero); } - if (type == DataTypes.UNSIGNED_LONG) { + if (type == DataType.UNSIGNED_LONG) { return ulongCases( min instanceof BigInteger ? (BigInteger) min : BigInteger.valueOf(Math.max(min.longValue(), 0L)), max instanceof BigInteger ? 
(BigInteger) max : BigInteger.valueOf(Math.max(max.longValue(), 0L)), includeZero ); } - if (type == DataTypes.DOUBLE) { + if (type == DataType.DOUBLE) { return doubleCases(min.doubleValue(), max.doubleValue(), includeZero); } throw new IllegalArgumentException("bogus numeric type [" + type + "]"); @@ -348,7 +347,7 @@ public static List forBinaryComparisonWithWidening( boolean allowRhsZero ) { List suppliers = new ArrayList<>(); - List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + List numericTypes = List.of(DataType.INTEGER, DataType.LONG, DataType.DOUBLE); for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { @@ -371,7 +370,7 @@ public static List forBinaryComparisonWithWidening( (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), warnings, suppliers, - DataTypes.BOOLEAN, + DataType.BOOLEAN, true ); } @@ -387,7 +386,7 @@ public static List forBinaryWithWidening( boolean allowRhsZero ) { List suppliers = new ArrayList<>(); - List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + List numericTypes = List.of(DataType.INTEGER, DataType.LONG, DataType.DOUBLE); for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { @@ -467,7 +466,7 @@ public static List forBinaryNotCasting( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#INTEGER}. + * Generate positive test cases for a unary function operating on an {@link DataType#INTEGER}. */ public static void forUnaryInt( List suppliers, @@ -501,7 +500,7 @@ public static void forUnaryInt( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#LONG}. + * Generate positive test cases for a unary function operating on an {@link DataType#LONG}. */ public static void forUnaryLong( List suppliers, @@ -535,7 +534,7 @@ public static void forUnaryLong( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#UNSIGNED_LONG}. + * Generate positive test cases for a unary function operating on an {@link DataType#UNSIGNED_LONG}. */ public static void forUnaryUnsignedLong( List suppliers, @@ -569,7 +568,7 @@ public static void forUnaryUnsignedLong( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#DOUBLE}. + * Generate positive test cases for a unary function operating on an {@link DataType#DOUBLE}. */ public static void forUnaryDouble( List suppliers, @@ -603,7 +602,7 @@ public static void forUnaryDouble( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#BOOLEAN}. + * Generate positive test cases for a unary function operating on an {@link DataType#BOOLEAN}. */ public static void forUnaryBoolean( List suppliers, @@ -616,7 +615,7 @@ public static void forUnaryBoolean( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#DATETIME}. + * Generate positive test cases for a unary function operating on an {@link DataType#DATETIME}. */ public static void forUnaryDatetime( List suppliers, @@ -636,7 +635,7 @@ public static void forUnaryDatetime( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#GEO_POINT}. + * Generate positive test cases for a unary function operating on an {@link DataType#GEO_POINT}. 
*/ public static void forUnaryGeoPoint( List suppliers, @@ -649,7 +648,7 @@ public static void forUnaryGeoPoint( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#CARTESIAN_POINT}. + * Generate positive test cases for a unary function operating on an {@link DataType#CARTESIAN_POINT}. */ public static void forUnaryCartesianPoint( List suppliers, @@ -662,7 +661,7 @@ public static void forUnaryCartesianPoint( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#GEO_SHAPE}. + * Generate positive test cases for a unary function operating on an {@link DataType#GEO_SHAPE}. */ public static void forUnaryGeoShape( List suppliers, @@ -675,7 +674,7 @@ public static void forUnaryGeoShape( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#CARTESIAN_SHAPE}. + * Generate positive test cases for a unary function operating on an {@link DataType#CARTESIAN_SHAPE}. */ public static void forUnaryCartesianShape( List suppliers, @@ -688,7 +687,7 @@ public static void forUnaryCartesianShape( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#IP}. + * Generate positive test cases for a unary function operating on an {@link DataType#IP}. */ public static void forUnaryIp( List suppliers, @@ -701,7 +700,7 @@ public static void forUnaryIp( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#KEYWORD} and {@link DataTypes#TEXT}. + * Generate positive test cases for a unary function operating on an {@link DataType#KEYWORD} and {@link DataType#TEXT}. */ public static void forUnaryStrings( List suppliers, @@ -733,7 +732,7 @@ public static void forUnaryStrings( } /** - * Generate positive test cases for a unary function operating on an {@link DataTypes#VERSION}. + * Generate positive test cases for a unary function operating on an {@link DataType#VERSION}. 
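Reviewer aside: several of the binary helpers above lean on the numeric widening rule that `TestCaseSupplier.widen` encodes: double absorbs long, long absorbs int, and anything else is rejected. The method body below is taken from the patch; only the pared-down enum around it is a stand-in:

```java
public class WidenDemo {
    enum DataType { INTEGER, LONG, DOUBLE }

    public static DataType widen(DataType lhs, DataType rhs) {
        if (lhs == rhs) {
            return lhs;
        }
        if (lhs == DataType.DOUBLE || rhs == DataType.DOUBLE) {
            return DataType.DOUBLE;
        }
        if (lhs == DataType.LONG || rhs == DataType.LONG) {
            return DataType.LONG;
        }
        throw new IllegalArgumentException("Invalid numeric widening lhs: [" + lhs + "] rhs: [" + rhs + "]");
    }

    public static void main(String[] args) {
        System.out.println(widen(DataType.INTEGER, DataType.LONG));  // LONG
        System.out.println(widen(DataType.LONG, DataType.DOUBLE));   // DOUBLE
    }
}
```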
*/ public static void forUnaryVersion( List suppliers, @@ -824,23 +823,23 @@ public static void unary( public static List intCases(int min, int max, boolean includeZero) { List cases = new ArrayList<>(); if (0 <= max && 0 >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 int>", () -> 0, DataTypes.INTEGER)); + cases.add(new TypedDataSupplier("<0 int>", () -> 0, DataType.INTEGER)); } int lower = Math.max(min, 1); int upper = Math.min(max, Integer.MAX_VALUE); if (lower < upper) { - cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower, upper), DataTypes.INTEGER)); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower, upper), DataType.INTEGER)); } else if (lower == upper) { - cases.add(new TypedDataSupplier("<" + lower + " int>", () -> lower, DataTypes.INTEGER)); + cases.add(new TypedDataSupplier("<" + lower + " int>", () -> lower, DataType.INTEGER)); } int lower1 = Math.max(min, Integer.MIN_VALUE); int upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower1, upper1), DataTypes.INTEGER)); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomIntBetween(lower1, upper1), DataType.INTEGER)); } else if (lower1 == upper1) { - cases.add(new TypedDataSupplier("<" + lower1 + " int>", () -> lower1, DataTypes.INTEGER)); + cases.add(new TypedDataSupplier("<" + lower1 + " int>", () -> lower1, DataType.INTEGER)); } return cases; } @@ -848,23 +847,23 @@ public static List intCases(int min, int max, boolean include public static List longCases(long min, long max, boolean includeZero) { List cases = new ArrayList<>(); if (0L <= max && 0L >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 long>", () -> 0L, DataTypes.LONG)); + cases.add(new TypedDataSupplier("<0 long>", () -> 0L, DataType.LONG)); } long lower = Math.max(min, 1); long upper = Math.min(max, Long.MAX_VALUE); if (lower < upper) { - cases.add(new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(lower, upper), DataTypes.LONG)); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(lower, upper), DataType.LONG)); } else if (lower == upper) { - cases.add(new TypedDataSupplier("<" + lower + " long>", () -> lower, DataTypes.LONG)); + cases.add(new TypedDataSupplier("<" + lower + " long>", () -> lower, DataType.LONG)); } long lower1 = Math.max(min, Long.MIN_VALUE); long upper1 = Math.min(max, -1); if (lower1 < upper1) { - cases.add(new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(lower1, upper1), DataTypes.LONG)); + cases.add(new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(lower1, upper1), DataType.LONG)); } else if (lower1 == upper1) { - cases.add(new TypedDataSupplier("<" + lower1 + " long>", () -> lower1, DataTypes.LONG)); + cases.add(new TypedDataSupplier("<" + lower1 + " long>", () -> lower1, DataType.LONG)); } return cases; @@ -875,7 +874,7 @@ public static List ulongCases(BigInteger min, BigInteger max, // Zero if (BigInteger.ZERO.compareTo(max) <= 0 && BigInteger.ZERO.compareTo(min) >= 0 && includeZero) { - cases.add(new TypedDataSupplier("<0 unsigned long>", () -> BigInteger.ZERO, DataTypes.UNSIGNED_LONG)); + cases.add(new TypedDataSupplier("<0 unsigned long>", () -> BigInteger.ZERO, DataType.UNSIGNED_LONG)); } // small values, less than Long.MAX_VALUE @@ -886,11 +885,11 @@ public static List ulongCases(BigInteger min, BigInteger max, new TypedDataSupplier( "", () -> ESTestCase.randomUnsignedLongBetween(lower1, upper1), - DataTypes.UNSIGNED_LONG 
+ DataType.UNSIGNED_LONG ) ); } else if (lower1.compareTo(upper1) == 0) { - cases.add(new TypedDataSupplier("", () -> lower1, DataTypes.UNSIGNED_LONG)); + cases.add(new TypedDataSupplier("", () -> lower1, DataType.UNSIGNED_LONG)); } // Big values, greater than Long.MAX_VALUE @@ -901,11 +900,11 @@ public static List ulongCases(BigInteger min, BigInteger max, new TypedDataSupplier( "", () -> ESTestCase.randomUnsignedLongBetween(lower2, upper2), - DataTypes.UNSIGNED_LONG + DataType.UNSIGNED_LONG ) ); } else if (lower2.compareTo(upper2) == 0) { - cases.add(new TypedDataSupplier("", () -> lower2, DataTypes.UNSIGNED_LONG)); + cases.add(new TypedDataSupplier("", () -> lower2, DataType.UNSIGNED_LONG)); } return cases; } @@ -915,8 +914,8 @@ public static List doubleCases(double min, double max, boolea // Zeros if (0d <= max && 0d >= min && includeZero) { - cases.add(new TypedDataSupplier("<0 double>", () -> 0.0d, DataTypes.DOUBLE)); - cases.add(new TypedDataSupplier("<-0 double>", () -> -0.0d, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("<0 double>", () -> 0.0d, DataType.DOUBLE)); + cases.add(new TypedDataSupplier("<-0 double>", () -> -0.0d, DataType.DOUBLE)); } // Positive small double @@ -927,11 +926,11 @@ public static List doubleCases(double min, double max, boolea new TypedDataSupplier( "", () -> ESTestCase.randomDoubleBetween(lower1, upper1, true), - DataTypes.DOUBLE + DataType.DOUBLE ) ); } else if (lower1 == upper1) { - cases.add(new TypedDataSupplier("", () -> lower1, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("", () -> lower1, DataType.DOUBLE)); } // Negative small double @@ -942,11 +941,11 @@ public static List doubleCases(double min, double max, boolea new TypedDataSupplier( "", () -> ESTestCase.randomDoubleBetween(lower2, upper2, true), - DataTypes.DOUBLE + DataType.DOUBLE ) ); } else if (lower2 == upper2) { - cases.add(new TypedDataSupplier("", () -> lower2, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("", () -> lower2, DataType.DOUBLE)); } // Positive big double @@ -954,10 +953,10 @@ public static List doubleCases(double min, double max, boolea double upper3 = Math.min(Double.MAX_VALUE, max); if (lower3 < upper3) { cases.add( - new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower3, upper3, true), DataTypes.DOUBLE) + new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower3, upper3, true), DataType.DOUBLE) ); } else if (lower3 == upper3) { - cases.add(new TypedDataSupplier("", () -> lower3, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("", () -> lower3, DataType.DOUBLE)); } // Negative big double @@ -966,47 +965,47 @@ public static List doubleCases(double min, double max, boolea double upper4 = Math.min(-1, max); // because again, the interval from -1 to 0 is very high density if (lower4 < upper4) { cases.add( - new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower4, upper4, true), DataTypes.DOUBLE) + new TypedDataSupplier("", () -> ESTestCase.randomDoubleBetween(lower4, upper4, true), DataType.DOUBLE) ); } else if (lower4 == upper4) { - cases.add(new TypedDataSupplier("", () -> lower4, DataTypes.DOUBLE)); + cases.add(new TypedDataSupplier("", () -> lower4, DataType.DOUBLE)); } return cases; } public static List booleanCases() { return List.of( - new TypedDataSupplier("", () -> true, DataTypes.BOOLEAN), - new TypedDataSupplier("", () -> false, DataTypes.BOOLEAN) + new TypedDataSupplier("", () -> true, DataType.BOOLEAN), + new TypedDataSupplier("", () -> false, DataType.BOOLEAN) ); } public static List 
dateCases() { return List.of( - new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME), + new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataType.DATETIME), new TypedDataSupplier( "", () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11), // 1970-01-01T00:00:00Z - 2286-11-20T17:46:40Z - DataTypes.DATETIME + DataType.DATETIME ), new TypedDataSupplier( "", // 2286-11-20T17:46:40Z - +292278994-08-17T07:12:55.807Z () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE), - DataTypes.DATETIME + DataType.DATETIME ), new TypedDataSupplier( "", // very close to +292278994-08-17T07:12:55.807Z, the maximum supported millis since epoch () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE), - DataTypes.DATETIME + DataType.DATETIME ) ); } public static List datePeriodCases() { return List.of( - new TypedDataSupplier("", () -> Period.ZERO, DataTypes.DATE_PERIOD, true), + new TypedDataSupplier("", () -> Period.ZERO, DataType.DATE_PERIOD, true), new TypedDataSupplier( "", () -> Period.of( @@ -1014,7 +1013,7 @@ public static List datePeriodCases() { ESTestCase.randomIntBetween(-13, 13), ESTestCase.randomIntBetween(-32, 32) ), - DataTypes.DATE_PERIOD, + DataType.DATE_PERIOD, true ) ); @@ -1022,11 +1021,11 @@ public static List datePeriodCases() { public static List timeDurationCases() { return List.of( - new TypedDataSupplier("", () -> Duration.ZERO, DataTypes.TIME_DURATION, true), + new TypedDataSupplier("", () -> Duration.ZERO, DataType.TIME_DURATION, true), new TypedDataSupplier( "", () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days - DataTypes.TIME_DURATION, + DataType.TIME_DURATION, true ) ); @@ -1050,7 +1049,7 @@ public static List cartesianShapeCases() { public static List geoPointCases(Supplier hasAlt) { return List.of( - new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint(hasAlt.get())), DataTypes.GEO_POINT) + new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint(hasAlt.get())), DataType.GEO_POINT) ); } @@ -1059,7 +1058,7 @@ public static List cartesianPointCases(Supplier hasA new TypedDataSupplier( "", () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(hasAlt.get())), - DataTypes.CARTESIAN_POINT + DataType.CARTESIAN_POINT ) ); } @@ -1069,7 +1068,7 @@ public static List geoShapeCases(Supplier hasAlt) { new TypedDataSupplier( "", () -> GEO.asWkb(GeometryTestUtils.randomGeometryWithoutCircle(0, hasAlt.get())), - DataTypes.GEO_SHAPE + DataType.GEO_SHAPE ) ); } @@ -1079,7 +1078,7 @@ public static List cartesianShapeCases(Supplier hasA new TypedDataSupplier( "", () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(hasAlt.get())), - DataTypes.CARTESIAN_SHAPE + DataType.CARTESIAN_SHAPE ) ); } @@ -1089,10 +1088,10 @@ public static List ipCases() { new TypedDataSupplier( "<127.0.0.1 ip>", () -> new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1"))), - DataTypes.IP + DataType.IP ), - new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true))), DataTypes.IP), - new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false))), DataTypes.IP) + new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(true))), DataType.IP), + new TypedDataSupplier("", () -> new BytesRef(InetAddressPoint.encode(ESTestCase.randomIp(false))), DataType.IP) ); } @@ -1130,18 +1129,18 @@ public static List versionCases(String prefix) { new TypedDataSupplier( 
"<" + prefix + "version major>", () -> new Version(Integer.toString(ESTestCase.between(0, 100))).toBytesRef(), - DataTypes.VERSION + DataType.VERSION ), new TypedDataSupplier( "<" + prefix + "version major.minor>", () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)).toBytesRef(), - DataTypes.VERSION + DataType.VERSION ), new TypedDataSupplier( "<" + prefix + "version major.minor.patch>", () -> new Version(ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100) + "." + ESTestCase.between(0, 100)) .toBytesRef(), - DataTypes.VERSION + DataType.VERSION ) ); } @@ -1150,61 +1149,61 @@ public static String getCastEvaluator(String original, DataType current, DataTyp if (current == target) { return original; } - if (target == DataTypes.LONG) { + if (target == DataType.LONG) { return castToLongEvaluator(original, current); } - if (target == DataTypes.UNSIGNED_LONG) { + if (target == DataType.UNSIGNED_LONG) { return castToUnsignedLongEvaluator(original, current); } - if (target == DataTypes.DOUBLE) { + if (target == DataType.DOUBLE) { return castToDoubleEvaluator(original, current); } throw new IllegalArgumentException("Invalid numeric cast to [" + target + "]"); } private static String castToLongEvaluator(String original, DataType current) { - if (current == DataTypes.LONG) { + if (current == DataType.LONG) { return original; } - if (current == DataTypes.INTEGER) { + if (current == DataType.INTEGER) { return "CastIntToLongEvaluator[v=" + original + "]"; } - if (current == DataTypes.DOUBLE) { + if (current == DataType.DOUBLE) { return "CastDoubleToLongEvaluator[v=" + original + "]"; } - if (current == DataTypes.UNSIGNED_LONG) { + if (current == DataType.UNSIGNED_LONG) { return "CastUnsignedLongToLong[v=" + original + "]"; } throw new UnsupportedOperationException(); } private static String castToUnsignedLongEvaluator(String original, DataType current) { - if (current == DataTypes.UNSIGNED_LONG) { + if (current == DataType.UNSIGNED_LONG) { return original; } - if (current == DataTypes.INTEGER) { + if (current == DataType.INTEGER) { return "CastIntToUnsignedLongEvaluator[v=" + original + "]"; } - if (current == DataTypes.LONG) { + if (current == DataType.LONG) { return "CastLongToUnsignedLongEvaluator[v=" + original + "]"; } - if (current == DataTypes.DOUBLE) { + if (current == DataType.DOUBLE) { return "CastDoubleToUnsignedLongEvaluator[v=" + original + "]"; } throw new UnsupportedOperationException(); } private static String castToDoubleEvaluator(String original, DataType current) { - if (current == DataTypes.DOUBLE) { + if (current == DataType.DOUBLE) { return original; } - if (current == DataTypes.INTEGER) { + if (current == DataType.INTEGER) { return "CastIntToDoubleEvaluator[v=" + original + "]"; } - if (current == DataTypes.LONG) { + if (current == DataType.LONG) { return "CastLongToDoubleEvaluator[v=" + original + "]"; } - if (current == DataTypes.UNSIGNED_LONG) { + if (current == DataType.UNSIGNED_LONG) { return "CastUnsignedLongToDoubleEvaluator[v=" + original + "]"; } throw new UnsupportedOperationException(); @@ -1379,7 +1378,7 @@ public TypedData get() { * Holds a data value and the intended parse type of that value */ public static class TypedData { - public static final TypedData NULL = new TypedData(null, DataTypes.NULL, ""); + public static final TypedData NULL = new TypedData(null, DataType.NULL, ""); private final Object data; private final DataType type; @@ -1393,7 +1392,7 @@ public static class TypedData { * @param forceLiteral should this data 
always be converted to a literal and never to a field reference? */ private TypedData(Object data, DataType type, String name, boolean forceLiteral) { - if (type == DataTypes.UNSIGNED_LONG && data instanceof BigInteger b) { + if (type == DataType.UNSIGNED_LONG && data instanceof BigInteger b) { this.data = NumericUtils.asLongUnsigned(b); } else { this.data = data; @@ -1447,7 +1446,7 @@ public TypedData forceValueToNull() { @Override public String toString() { - if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { + if (type == DataType.UNSIGNED_LONG && data instanceof Long longData) { return type.toString() + "(" + NumericUtils.unsignedLongAsBigInteger(longData).toString() + ")"; } return type.toString() + "(" + (data == null ? "null" : data.toString()) + ")"; @@ -1491,7 +1490,7 @@ public Object data() { * @return the data value being supplied, casting unsigned longs into BigIntegers correctly */ public Object getValue() { - if (type == DataTypes.UNSIGNED_LONG && data instanceof Long l) { + if (type == DataType.UNSIGNED_LONG && data instanceof Long l) { return NumericUtils.unsignedLongAsBigInteger(l); } return data; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java deleted file mode 100644 index 18957a7f115b8..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.function.scalar; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.tree.Location; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.hamcrest.Matcher; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.stream.Stream; - -import static org.hamcrest.Matchers.equalTo; - -/** - * Base class for function tests. - * @deprecated extends from {@link AbstractFunctionTestCase} instead - * and {@link AbstractFunctionTestCase#errorsForCasesWithoutExamples}. - */ -@Deprecated -public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTestCase { - /** - * Describe supported arguments. Build each argument with - * {@link #required} or {@link #optional}. - */ - protected abstract List argSpec(); - - /** - * The data type that applying this function to arguments of this type should produce. - */ - protected abstract DataType expectedType(List argTypes); - - /** - * Define a required argument. - */ - protected final ArgumentSpec required(DataType... 
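
TypedData above stores UNSIGNED_LONG values as signed longs via NumericUtils.asLongUnsigned and converts back with unsignedLongAsBigInteger. A sketch of one plausible encoding (a 2^63 shift, which keeps signed ordering aligned with unsigned ordering); this is an assumption about the scheme, not a copy of NumericUtils:

import java.math.BigInteger;

public class UnsignedLongEncodingSketch {
    private static final BigInteger TWO_POW_63 = BigInteger.ONE.shiftLeft(63);

    static long asLongUnsigned(BigInteger u) {          // u in [0, 2^64)
        return u.subtract(TWO_POW_63).longValueExact(); // lands in [Long.MIN_VALUE, Long.MAX_VALUE]
    }

    static BigInteger unsignedLongAsBigInteger(long encoded) {
        return BigInteger.valueOf(encoded).add(TWO_POW_63);
    }

    public static void main(String[] args) {
        BigInteger max = BigInteger.TWO.pow(64).subtract(BigInteger.ONE); // 18446744073709551615
        long encoded = asLongUnsigned(max);
        System.out.println(encoded);                           // 9223372036854775807
        System.out.println(unsignedLongAsBigInteger(encoded)); // round-trips to 2^64 - 1
    }
}
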
validTypes) { - return new ArgumentSpec(false, withNullAndSorted(validTypes)); - } - - /** - * Define an optional argument. - */ - protected final ArgumentSpec optional(DataType... validTypes) { - return new ArgumentSpec(true, withNullAndSorted(validTypes)); - } - - private Set withNullAndSorted(DataType[] validTypes) { - Set realValidTypes = new LinkedHashSet<>(); - Arrays.stream(validTypes).sorted(Comparator.comparing(DataType::name)).forEach(realValidTypes::add); - realValidTypes.add(DataTypes.NULL); - return realValidTypes; - } - - public Set sortedTypesSet(DataType[] validTypes, DataType... additionalTypes) { - Set mergedSet = new LinkedHashSet<>(); - Stream.concat(Stream.of(validTypes), Stream.of(additionalTypes)) - .sorted(Comparator.comparing(DataType::name)) - .forEach(mergedSet::add); - return mergedSet; - } - - /** - * All integer types (long, int, short, byte). For passing to {@link #required} or {@link #optional}. - */ - protected static DataType[] integers() { - return DataTypes.types().stream().filter(DataType::isInteger).toArray(DataType[]::new); - } - - /** - * All rational types (double, float, whatever). For passing to {@link #required} or {@link #optional}. - */ - protected static DataType[] rationals() { - return DataTypes.types().stream().filter(DataType::isRational).toArray(DataType[]::new); - } - - /** - * All numeric types (integers and rationals.) For passing to {@link #required} or {@link #optional}. - */ - protected static DataType[] numerics() { - return DataTypes.types().stream().filter(DataType::isNumeric).toArray(DataType[]::new); - } - - protected final DataType[] representableNumerics() { - // TODO numeric should only include representable numbers but that is a change for a followup - return DataTypes.types().stream().filter(DataType::isNumeric).filter(EsqlDataTypes::isRepresentable).toArray(DataType[]::new); - } - - protected record ArgumentSpec(boolean optional, Set validTypes) {} - - public final void testResolveType() { - List specs = argSpec(); - for (int mutArg = 0; mutArg < specs.size(); mutArg++) { - for (DataType mutArgType : DataTypes.types()) { - List args = new ArrayList<>(specs.size()); - for (int arg = 0; arg < specs.size(); arg++) { - if (mutArg == arg) { - args.add(new Literal(new Source(Location.EMPTY, "arg" + arg), "", mutArgType)); - } else { - args.add(new Literal(new Source(Location.EMPTY, "arg" + arg), "", specs.get(arg).validTypes.iterator().next())); - } - } - assertResolution(specs, args, mutArg, mutArgType, specs.get(mutArg).validTypes.contains(mutArgType)); - int optionalIdx = specs.size() - 1; - while (optionalIdx > 0 && specs.get(optionalIdx).optional()) { - args.remove(optionalIdx--); - assertResolution( - specs, - args, - mutArg, - mutArgType, - args.size() <= mutArg || specs.get(mutArg).validTypes.contains(mutArgType) - ); - } - } - } - } - - private void assertResolution(List specs, List args, int mutArg, DataType mutArgType, boolean shouldBeValid) { - Expression exp = build(new Source(Location.EMPTY, "exp"), args); - logger.info("checking {} is {}", exp.nodeString(), shouldBeValid ? 
"valid" : "invalid"); - if (shouldBeValid) { - assertResolveTypeValid(exp, expectedType(args.stream().map(Expression::dataType).toList())); - return; - } - Expression.TypeResolution resolution = exp.typeResolved(); - assertFalse(exp.nodeString(), resolution.resolved()); - assertThat(exp.nodeString(), resolution.message(), badTypeError(specs, mutArg, mutArgType)); - } - - protected Matcher badTypeError(List spec, int badArgPosition, DataType badArgType) { - String ordinal = spec.size() == 1 - ? "" - : TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " "; - return equalTo( - ordinal - + "argument of [exp] must be [" - + expectedTypeName(spec.get(badArgPosition).validTypes()) - + "], found value [arg" - + badArgPosition - + "] type [" - + badArgType.typeName() - + "]" - ); - } - - private String expectedTypeName(Set validTypes) { - List withoutNull = validTypes.stream().filter(t -> t != DataTypes.NULL).toList(); - if (withoutNull.equals(Arrays.asList(strings()))) { - return "string"; - } - if (withoutNull.equals(Arrays.asList(integers())) || withoutNull.equals(List.of(DataTypes.INTEGER))) { - return "integer"; - } - if (withoutNull.equals(Arrays.asList(rationals()))) { - return "double"; - } - if (withoutNull.equals(Arrays.asList(numerics())) || withoutNull.equals(Arrays.asList(representableNumerics()))) { - return "numeric"; - } - if (withoutNull.equals(List.of(DataTypes.DATETIME))) { - return "datetime"; - } - if (withoutNull.equals(List.of(DataTypes.IP))) { - return "ip"; - } - List negations = Stream.concat(Stream.of(numerics()), Stream.of(DataTypes.DATE_PERIOD, DataTypes.TIME_DURATION)) - .sorted(Comparator.comparing(DataType::name)) - .toList(); - if (withoutNull.equals(negations)) { - return "numeric, date_period or time_duration"; - } - if (validTypes.equals(Set.copyOf(Arrays.asList(representableTypes())))) { - return "representable"; - } - if (validTypes.equals(Set.copyOf(Arrays.asList(representableNonSpatialTypes())))) { - return "representableNonSpatial"; - } - throw new IllegalArgumentException("can't guess expected type for " + validTypes); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java index 536a24e6ce5c4..7843ad7eac8b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Location; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; @@ -53,7 +53,7 @@ public void testArithmeticFunctionName() { } public void testNameForArithmeticFunctionAppliedOnTableColumn() { - FieldAttribute fa = new FieldAttribute(EMPTY, "myField", new EsField("myESField", DataTypes.INTEGER, emptyMap(), true)); + FieldAttribute fa = new FieldAttribute(EMPTY, "myField", new EsField("myESField", DataType.INTEGER, emptyMap(), 
true)); String e = "myField + 10"; Add add = new Add(s(e), fa, l(10)); assertEquals(e, add.sourceText()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/VaragsTestCaseBuilder.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/VaragsTestCaseBuilder.java index 55a8c39756fcc..86cce2a66baf8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/VaragsTestCaseBuilder.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/VaragsTestCaseBuilder.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -203,16 +202,16 @@ private void strings(List suppliers) { int paramCount = count; suppliers.add( new TestCaseSupplier( - testCaseName(paramCount, multivalued, DataTypes.KEYWORD), - dataTypes(paramCount, DataTypes.KEYWORD), - () -> stringCase(DataTypes.KEYWORD, paramCount, multivalued) + testCaseName(paramCount, multivalued, DataType.KEYWORD), + dataTypes(paramCount, DataType.KEYWORD), + () -> stringCase(DataType.KEYWORD, paramCount, multivalued) ) ); suppliers.add( new TestCaseSupplier( - testCaseName(paramCount, multivalued, DataTypes.TEXT), - dataTypes(paramCount, DataTypes.TEXT), - () -> stringCase(DataTypes.TEXT, paramCount, multivalued) + testCaseName(paramCount, multivalued, DataType.TEXT), + dataTypes(paramCount, DataType.TEXT), + () -> stringCase(DataType.TEXT, paramCount, multivalued) ) ); } @@ -240,8 +239,8 @@ private void longs(List suppliers) { int paramCount = count; suppliers.add( new TestCaseSupplier( - testCaseName(paramCount, multivalued, DataTypes.LONG), - dataTypes(paramCount, DataTypes.LONG), + testCaseName(paramCount, multivalued, DataType.LONG), + dataTypes(paramCount, DataType.LONG), () -> longCase(paramCount, multivalued) ) ); @@ -257,14 +256,14 @@ private TestCaseSupplier.TestCase longCase(int paramCount, boolean multivalued) List d = ESTestCase.randomList(1, 4, () -> ESTestCase.randomLong()); data[p] = d.stream().mapToLong(Long::longValue).toArray(); typedData.add( - new TestCaseSupplier.TypedData(Arrays.stream(data[p]).mapToObj(Long::valueOf).toList(), DataTypes.LONG, "field" + p) + new TestCaseSupplier.TypedData(Arrays.stream(data[p]).mapToObj(Long::valueOf).toList(), DataType.LONG, "field" + p) ); } else { data[p] = new long[] { ESTestCase.randomLong() }; - typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataTypes.LONG, "field" + p)); + typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataType.LONG, "field" + p)); } } - return testCase(typedData, expectedEvaluatorPrefix.apply("Long"), DataTypes.LONG, expectedLong.apply(data)); + return testCase(typedData, expectedEvaluatorPrefix.apply("Long"), DataType.LONG, expectedLong.apply(data)); } private void ints(List suppliers) { @@ -273,8 +272,8 @@ private void ints(List suppliers) { int paramCount = count; suppliers.add( new TestCaseSupplier( - testCaseName(paramCount, multivalued, DataTypes.INTEGER), - dataTypes(paramCount, DataTypes.INTEGER), + testCaseName(paramCount, multivalued, DataType.INTEGER), + dataTypes(paramCount, DataType.INTEGER), () -> intCase(paramCount, multivalued) ) ); @@ -289,13 +288,13 @@ private TestCaseSupplier.TestCase intCase(int 
paramCount, boolean multivalued) { if (multivalued) { List d = ESTestCase.randomList(1, 4, () -> ESTestCase.randomInt()); data[p] = d.stream().mapToInt(Integer::intValue).toArray(); - typedData.add(new TestCaseSupplier.TypedData(d, DataTypes.INTEGER, "field" + p)); + typedData.add(new TestCaseSupplier.TypedData(d, DataType.INTEGER, "field" + p)); } else { data[p] = new int[] { ESTestCase.randomInt() }; - typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataTypes.INTEGER, "field" + p)); + typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataType.INTEGER, "field" + p)); } } - return testCase(typedData, expectedEvaluatorPrefix.apply("Int"), DataTypes.INTEGER, expectedInt.apply(data)); + return testCase(typedData, expectedEvaluatorPrefix.apply("Int"), DataType.INTEGER, expectedInt.apply(data)); } private void booleans(List suppliers) { @@ -304,8 +303,8 @@ private void booleans(List suppliers) { int paramCount = count; suppliers.add( new TestCaseSupplier( - testCaseName(paramCount, multivalued, DataTypes.BOOLEAN), - dataTypes(paramCount, DataTypes.BOOLEAN), + testCaseName(paramCount, multivalued, DataType.BOOLEAN), + dataTypes(paramCount, DataType.BOOLEAN), () -> booleanCase(paramCount, multivalued) ) ); @@ -325,13 +324,13 @@ private TestCaseSupplier.TestCase booleanCase(int paramCount, boolean multivalue data[p][i] = ESTestCase.randomBoolean(); paramData.add(data[p][i]); } - typedData.add(new TestCaseSupplier.TypedData(paramData, DataTypes.BOOLEAN, "field" + p)); + typedData.add(new TestCaseSupplier.TypedData(paramData, DataType.BOOLEAN, "field" + p)); } else { data[p] = new boolean[] { ESTestCase.randomBoolean() }; - typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataTypes.BOOLEAN, "field" + p)); + typedData.add(new TestCaseSupplier.TypedData(data[p][0], DataType.BOOLEAN, "field" + p)); } } - return testCase(typedData, expectedEvaluatorPrefix.apply("Boolean"), DataTypes.BOOLEAN, expectedBoolean.apply(data)); + return testCase(typedData, expectedEvaluatorPrefix.apply("Boolean"), DataType.BOOLEAN, expectedBoolean.apply(data)); } private String testCaseName(int count, boolean multivalued, DataType type) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java index 85d2ed7b30173..80a3cd48e5147 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseExtraTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.util.List; @@ -27,38 +27,38 @@ public void testElseValueExplicit() { assertThat( new Case( Source.synthetic("case"), - field("first_cond", DataTypes.BOOLEAN), - List.of(field("v", DataTypes.LONG), field("e", DataTypes.LONG)) + field("first_cond", DataType.BOOLEAN), + List.of(field("v", DataType.LONG), field("e", DataType.LONG)) ).children(), - equalTo(List.of(field("first_cond", DataTypes.BOOLEAN), field("v", DataTypes.LONG), field("e", DataTypes.LONG))) + equalTo(List.of(field("first_cond", DataType.BOOLEAN), field("v", 
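
VaragsTestCaseBuilder above flattens each parameter, multivalued or not, into a primitive array and computes the expected result from that raw data. A sketch of a GREATEST-style expectation over such data (names illustrative):

import java.util.Arrays;

public class VarargsExpectationSketch {
    public static void main(String[] args) {
        long[][] data = { { 3L, 9L, 1L }, { 5L } }; // param 0 multivalued, param 1 single-valued
        long expected = Arrays.stream(data)
            .mapToLong(field -> Arrays.stream(field).max().getAsLong()) // reduce each field first
            .max()
            .getAsLong();                                               // then across fields
        System.out.println(expected); // 9
    }
}
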
DataType.LONG), field("e", DataType.LONG))) ); } public void testElseValueImplied() { assertThat( - new Case(Source.synthetic("case"), field("first_cond", DataTypes.BOOLEAN), List.of(field("v", DataTypes.LONG))).children(), - equalTo(List.of(field("first_cond", DataTypes.BOOLEAN), field("v", DataTypes.LONG))) + new Case(Source.synthetic("case"), field("first_cond", DataType.BOOLEAN), List.of(field("v", DataType.LONG))).children(), + equalTo(List.of(field("first_cond", DataType.BOOLEAN), field("v", DataType.LONG))) ); } public void testPartialFoldDropsFirstFalse() { Case c = new Case( Source.synthetic("case"), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), - List.of(field("first", DataTypes.LONG), field("last_cond", DataTypes.BOOLEAN), field("last", DataTypes.LONG)) + new Literal(Source.EMPTY, false, DataType.BOOLEAN), + List.of(field("first", DataType.LONG), field("last_cond", DataType.BOOLEAN), field("last", DataType.LONG)) ); assertThat(c.foldable(), equalTo(false)); assertThat( c.partiallyFold(), - equalTo(new Case(Source.synthetic("case"), field("last_cond", DataTypes.BOOLEAN), List.of(field("last", DataTypes.LONG)))) + equalTo(new Case(Source.synthetic("case"), field("last_cond", DataType.BOOLEAN), List.of(field("last", DataType.LONG)))) ); } public void testPartialFoldNoop() { Case c = new Case( Source.synthetic("case"), - field("first_cond", DataTypes.BOOLEAN), - List.of(field("first", DataTypes.LONG), field("last", DataTypes.LONG)) + field("first_cond", DataType.BOOLEAN), + List.of(field("first", DataType.LONG), field("last", DataType.LONG)) ); assertThat(c.foldable(), equalTo(false)); assertThat(c.partiallyFold(), sameInstance(c)); @@ -67,22 +67,22 @@ public void testPartialFoldNoop() { public void testPartialFoldFirst() { Case c = new Case( Source.synthetic("case"), - new Literal(Source.EMPTY, true, DataTypes.BOOLEAN), - List.of(field("first", DataTypes.LONG), field("last", DataTypes.LONG)) + new Literal(Source.EMPTY, true, DataType.BOOLEAN), + List.of(field("first", DataType.LONG), field("last", DataType.LONG)) ); assertThat(c.foldable(), equalTo(false)); - assertThat(c.partiallyFold(), equalTo(field("first", DataTypes.LONG))); + assertThat(c.partiallyFold(), equalTo(field("first", DataType.LONG))); } public void testPartialFoldFirstAfterKeepingUnknown() { Case c = new Case( Source.synthetic("case"), - field("keep_me_cond", DataTypes.BOOLEAN), + field("keep_me_cond", DataType.BOOLEAN), List.of( - field("keep_me", DataTypes.LONG), - new Literal(Source.EMPTY, true, DataTypes.BOOLEAN), - field("first", DataTypes.LONG), - field("last", DataTypes.LONG) + field("keep_me", DataType.LONG), + new Literal(Source.EMPTY, true, DataType.BOOLEAN), + field("first", DataType.LONG), + field("last", DataType.LONG) ) ); assertThat(c.foldable(), equalTo(false)); @@ -91,8 +91,8 @@ public void testPartialFoldFirstAfterKeepingUnknown() { equalTo( new Case( Source.synthetic("case"), - field("keep_me_cond", DataTypes.BOOLEAN), - List.of(field("keep_me", DataTypes.LONG), field("first", DataTypes.LONG)) + field("keep_me_cond", DataType.BOOLEAN), + List.of(field("keep_me", DataType.LONG), field("first", DataType.LONG)) ) ) ); @@ -101,57 +101,57 @@ public void testPartialFoldFirstAfterKeepingUnknown() { public void testPartialFoldSecond() { Case c = new Case( Source.synthetic("case"), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), + new Literal(Source.EMPTY, false, DataType.BOOLEAN), List.of( - field("first", DataTypes.LONG), - new Literal(Source.EMPTY, true, DataTypes.BOOLEAN), - 
field("second", DataTypes.LONG), - field("last", DataTypes.LONG) + field("first", DataType.LONG), + new Literal(Source.EMPTY, true, DataType.BOOLEAN), + field("second", DataType.LONG), + field("last", DataType.LONG) ) ); assertThat(c.foldable(), equalTo(false)); - assertThat(c.partiallyFold(), equalTo(field("second", DataTypes.LONG))); + assertThat(c.partiallyFold(), equalTo(field("second", DataType.LONG))); } public void testPartialFoldSecondAfterDroppingFalse() { Case c = new Case( Source.synthetic("case"), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), + new Literal(Source.EMPTY, false, DataType.BOOLEAN), List.of( - field("first", DataTypes.LONG), - new Literal(Source.EMPTY, true, DataTypes.BOOLEAN), - field("second", DataTypes.LONG), - field("last", DataTypes.LONG) + field("first", DataType.LONG), + new Literal(Source.EMPTY, true, DataType.BOOLEAN), + field("second", DataType.LONG), + field("last", DataType.LONG) ) ); assertThat(c.foldable(), equalTo(false)); - assertThat(c.partiallyFold(), equalTo(field("second", DataTypes.LONG))); + assertThat(c.partiallyFold(), equalTo(field("second", DataType.LONG))); } public void testPartialFoldLast() { Case c = new Case( Source.synthetic("case"), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), + new Literal(Source.EMPTY, false, DataType.BOOLEAN), List.of( - field("first", DataTypes.LONG), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), - field("second", DataTypes.LONG), - field("last", DataTypes.LONG) + field("first", DataType.LONG), + new Literal(Source.EMPTY, false, DataType.BOOLEAN), + field("second", DataType.LONG), + field("last", DataType.LONG) ) ); assertThat(c.foldable(), equalTo(false)); - assertThat(c.partiallyFold(), equalTo(field("last", DataTypes.LONG))); + assertThat(c.partiallyFold(), equalTo(field("last", DataType.LONG))); } public void testPartialFoldLastAfterKeepingUnknown() { Case c = new Case( Source.synthetic("case"), - field("keep_me_cond", DataTypes.BOOLEAN), + field("keep_me_cond", DataType.BOOLEAN), List.of( - field("keep_me", DataTypes.LONG), - new Literal(Source.EMPTY, false, DataTypes.BOOLEAN), - field("first", DataTypes.LONG), - field("last", DataTypes.LONG) + field("keep_me", DataType.LONG), + new Literal(Source.EMPTY, false, DataType.BOOLEAN), + field("first", DataType.LONG), + field("last", DataType.LONG) ) ); assertThat(c.foldable(), equalTo(false)); @@ -160,8 +160,8 @@ public void testPartialFoldLastAfterKeepingUnknown() { equalTo( new Case( Source.synthetic("case"), - field("keep_me_cond", DataTypes.BOOLEAN), - List.of(field("keep_me", DataTypes.LONG), field("last", DataTypes.LONG)) + field("keep_me_cond", DataType.BOOLEAN), + List.of(field("keep_me", DataType.LONG), field("last", DataType.LONG)) ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index e14b69d627f2e..f24955eb4804a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -47,158 +46,158 @@ public CaseTests(@Name("TestCase") Supplier testCaseS @ParametersFactory public static Iterable parameters() { return parameterSuppliersFromTypedData( - List.of(new TestCaseSupplier("keyword", List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + List.of(new TestCaseSupplier("keyword", List.of(DataType.BOOLEAN, DataType.KEYWORD, DataType.KEYWORD), () -> { List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataType.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataType.KEYWORD, "b") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("a")) ); - }), new TestCaseSupplier("text", List.of(DataTypes.BOOLEAN, DataTypes.TEXT), () -> { + }), new TestCaseSupplier("text", List.of(DataType.BOOLEAN, DataType.TEXT), () -> { List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.TEXT, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataType.TEXT, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.TEXT, + DataType.TEXT, nullValue() ); - }), new TestCaseSupplier("boolean", List.of(DataTypes.BOOLEAN, DataTypes.BOOLEAN), () -> { + }), new TestCaseSupplier("boolean", List.of(DataType.BOOLEAN, DataType.BOOLEAN), () -> { List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BOOLEAN, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, nullValue() ); - }), new TestCaseSupplier("date", List.of(DataTypes.BOOLEAN, DataTypes.DATETIME), () -> { + }), new TestCaseSupplier("date", List.of(DataType.BOOLEAN, DataType.DATETIME), () -> { long value = randomNonNegativeLong(); List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "trueValue") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.DATETIME, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=LONG, 
conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(value) ); - }), new TestCaseSupplier("double", List.of(DataTypes.BOOLEAN, DataTypes.DOUBLE), () -> { + }), new TestCaseSupplier("double", List.of(DataType.BOOLEAN, DataType.DOUBLE), () -> { double value = randomDouble(); List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.DOUBLE, "trueValue") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.DOUBLE, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=DOUBLE, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(value) ); - }), new TestCaseSupplier("integer", List.of(DataTypes.BOOLEAN, DataTypes.INTEGER), () -> { + }), new TestCaseSupplier("integer", List.of(DataType.BOOLEAN, DataType.INTEGER), () -> { int value = randomInt(); List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.INTEGER, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.INTEGER, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=INT, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.INTEGER, + DataType.INTEGER, nullValue() ); - }), new TestCaseSupplier("long", List.of(DataTypes.BOOLEAN, DataTypes.LONG), () -> { + }), new TestCaseSupplier("long", List.of(DataType.BOOLEAN, DataType.LONG), () -> { long value = randomLong(); List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.LONG, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.LONG, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.LONG, + DataType.LONG, nullValue() ); - }), new TestCaseSupplier("unsigned_long", List.of(DataTypes.BOOLEAN, DataTypes.UNSIGNED_LONG), () -> { + }), new TestCaseSupplier("unsigned_long", List.of(DataType.BOOLEAN, DataType.UNSIGNED_LONG), () -> { BigInteger value = randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX); List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.UNSIGNED_LONG, "trueValue") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.UNSIGNED_LONG, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, equalTo(value) ); - }), new TestCaseSupplier("ip", List.of(DataTypes.BOOLEAN, DataTypes.IP), () -> { - BytesRef value = (BytesRef) 
randomLiteral(DataTypes.IP).value(); + }), new TestCaseSupplier("ip", List.of(DataType.BOOLEAN, DataType.IP), () -> { + BytesRef value = (BytesRef) randomLiteral(DataType.IP).value(); List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.IP, "trueValue") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.IP, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.IP, + DataType.IP, equalTo(value) ); - }), new TestCaseSupplier("version", List.of(DataTypes.BOOLEAN, DataTypes.VERSION), () -> { - BytesRef value = (BytesRef) randomLiteral(DataTypes.VERSION).value(); + }), new TestCaseSupplier("version", List.of(DataType.BOOLEAN, DataType.VERSION), () -> { + BytesRef value = (BytesRef) randomLiteral(DataType.VERSION).value(); List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.VERSION, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.VERSION, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.VERSION, + DataType.VERSION, nullValue() ); - }), new TestCaseSupplier("cartesian_point", List.of(DataTypes.BOOLEAN, DataTypes.CARTESIAN_POINT), () -> { - BytesRef value = (BytesRef) randomLiteral(DataTypes.CARTESIAN_POINT).value(); + }), new TestCaseSupplier("cartesian_point", List.of(DataType.BOOLEAN, DataType.CARTESIAN_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(DataType.CARTESIAN_POINT).value(); List typedData = List.of( - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.CARTESIAN_POINT, "trueValue") + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.CARTESIAN_POINT, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.CARTESIAN_POINT, + DataType.CARTESIAN_POINT, nullValue() ); - }), new TestCaseSupplier("geo_point", List.of(DataTypes.BOOLEAN, DataTypes.GEO_POINT), () -> { - BytesRef value = (BytesRef) randomLiteral(DataTypes.GEO_POINT).value(); + }), new TestCaseSupplier("geo_point", List.of(DataType.BOOLEAN, DataType.GEO_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(DataType.GEO_POINT).value(); List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(value, DataTypes.GEO_POINT, "trueValue") + new TestCaseSupplier.TypedData(true, DataType.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataType.GEO_POINT, "trueValue") ); return new TestCaseSupplier.TestCase( typedData, "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", - DataTypes.GEO_POINT, + 
DataType.GEO_POINT, equalTo(value) ); })) @@ -271,15 +270,15 @@ public void testCase(Function toValue) { assertEquals(3, toValue.apply(caseExpr(false, 1, false, 2, 3))); assertNull(toValue.apply(caseExpr(true, null, 1))); assertEquals(1, toValue.apply(caseExpr(false, null, 1))); - assertEquals(1, toValue.apply(caseExpr(false, field("ignored", DataTypes.INTEGER), 1))); - assertEquals(1, toValue.apply(caseExpr(true, 1, field("ignored", DataTypes.INTEGER)))); + assertEquals(1, toValue.apply(caseExpr(false, field("ignored", DataType.INTEGER), 1))); + assertEquals(1, toValue.apply(caseExpr(true, 1, field("ignored", DataType.INTEGER)))); } public void testIgnoreLeadingNulls() { - assertEquals(DataTypes.INTEGER, resolveType(false, null, 1)); - assertEquals(DataTypes.INTEGER, resolveType(false, null, false, null, false, 2, null)); - assertEquals(DataTypes.NULL, resolveType(false, null, null)); - assertEquals(DataTypes.BOOLEAN, resolveType(false, null, field("bool", DataTypes.BOOLEAN))); + assertEquals(DataType.INTEGER, resolveType(false, null, 1)); + assertEquals(DataType.INTEGER, resolveType(false, null, false, null, false, 2, null)); + assertEquals(DataType.NULL, resolveType(false, null, null)); + assertEquals(DataType.BOOLEAN, resolveType(false, null, field("bool", DataType.BOOLEAN))); } public void testCaseWithInvalidCondition() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java index 38092791f6d6a..9376849d8136c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; @@ -43,14 +43,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + List.of(DataType.KEYWORD, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + new TestCaseSupplier.TypedData(new BytesRef("a"), DataType.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataType.KEYWORD, "b") ), "GreatestBytesRefEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("b")) ) ) @@ -58,14 +58,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.VERSION, DataTypes.VERSION), + List.of(DataType.VERSION, DataType.VERSION), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("1"), DataTypes.VERSION, "a"), - new TestCaseSupplier.TypedData(new BytesRef("2"), 
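
The expected "CaseEvaluator[resultType=...]" strings above pair each ESQL type with its physical block representation. Collected into one runnable mapping (derived from the test strings, so only the types exercised here):

public class CaseResultTypes {
    static String blockType(String esqlType) {
        return switch (esqlType) {
            case "keyword", "text", "ip", "version", "cartesian_point", "geo_point" -> "BYTES_REF";
            case "date", "long", "unsigned_long" -> "LONG";
            case "integer" -> "INT";
            case "double" -> "DOUBLE";
            case "boolean" -> "BOOLEAN";
            default -> throw new IllegalArgumentException(esqlType);
        };
    }

    public static void main(String[] args) {
        System.out.println(blockType("unsigned_long")); // LONG: carried in long blocks
        System.out.println(blockType("version"));       // BYTES_REF
    }
}
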
DataTypes.VERSION, "b") + new TestCaseSupplier.TypedData(new BytesRef("1"), DataType.VERSION, "a"), + new TestCaseSupplier.TypedData(new BytesRef("2"), DataType.VERSION, "b") ), "GreatestBytesRefEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", - DataTypes.VERSION, + DataType.VERSION, equalTo(new BytesRef("2")) ) ) @@ -73,14 +73,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.IP, DataTypes.IP), + List.of(DataType.IP, DataType.IP), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("127.0.0.1"), DataTypes.IP, "a"), - new TestCaseSupplier.TypedData(new BytesRef("127.0.0.2"), DataTypes.IP, "b") + new TestCaseSupplier.TypedData(new BytesRef("127.0.0.1"), DataType.IP, "a"), + new TestCaseSupplier.TypedData(new BytesRef("127.0.0.2"), DataType.IP, "b") ), "GreatestBytesRefEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", - DataTypes.IP, + DataType.IP, equalTo(new BytesRef("127.0.0.2")) ) ) @@ -88,14 +88,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.DOUBLE, DataTypes.DOUBLE), + List.of(DataType.DOUBLE, DataType.DOUBLE), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(1d, DataTypes.DOUBLE, "a"), - new TestCaseSupplier.TypedData(2d, DataTypes.DOUBLE, "b") + new TestCaseSupplier.TypedData(1d, DataType.DOUBLE, "a"), + new TestCaseSupplier.TypedData(2d, DataType.DOUBLE, "b") ), "GreatestDoubleEvaluator[values=[MvMax[field=Attribute[channel=0]], MvMax[field=Attribute[channel=1]]]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(2d) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java index cadfb1e6d1c6f..0881b871c30f6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; @@ -42,14 +42,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + List.of(DataType.KEYWORD, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + new TestCaseSupplier.TypedData(new BytesRef("a"), DataType.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataType.KEYWORD, "b") ), "LeastBytesRefEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("a")) ) ) @@ -57,14 +57,14 @@ 
public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.VERSION, DataTypes.VERSION), + List.of(DataType.VERSION, DataType.VERSION), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("1"), DataTypes.VERSION, "a"), - new TestCaseSupplier.TypedData(new BytesRef("2"), DataTypes.VERSION, "b") + new TestCaseSupplier.TypedData(new BytesRef("1"), DataType.VERSION, "a"), + new TestCaseSupplier.TypedData(new BytesRef("2"), DataType.VERSION, "b") ), "LeastBytesRefEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", - DataTypes.VERSION, + DataType.VERSION, equalTo(new BytesRef("1")) ) ) @@ -72,14 +72,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.IP, DataTypes.IP), + List.of(DataType.IP, DataType.IP), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("127.0.0.1"), DataTypes.IP, "a"), - new TestCaseSupplier.TypedData(new BytesRef("127.0.0.2"), DataTypes.IP, "b") + new TestCaseSupplier.TypedData(new BytesRef("127.0.0.1"), DataType.IP, "a"), + new TestCaseSupplier.TypedData(new BytesRef("127.0.0.2"), DataType.IP, "b") ), "LeastBytesRefEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", - DataTypes.IP, + DataType.IP, equalTo(new BytesRef("127.0.0.1")) ) ) @@ -87,14 +87,14 @@ public static Iterable parameters() { suppliers.add( new TestCaseSupplier( "(a, b)", - List.of(DataTypes.DOUBLE, DataTypes.DOUBLE), + List.of(DataType.DOUBLE, DataType.DOUBLE), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(1d, DataTypes.DOUBLE, "a"), - new TestCaseSupplier.TypedData(2d, DataTypes.DOUBLE, "b") + new TestCaseSupplier.TypedData(1d, DataType.DOUBLE, "a"), + new TestCaseSupplier.TypedData(2d, DataType.DOUBLE, "b") ), "LeastDoubleEvaluator[values=[MvMin[field=Attribute[channel=0]], MvMin[field=Attribute[channel=1]]]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(1d) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java index 62b0070e8a336..2096d9cec75b1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -35,22 +35,22 @@ public FromBase64Tests(@Name("TestCase") Supplier tes @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD), () -> { BytesRef input = new BytesRef(randomAlphaOfLength(6)); return new TestCaseSupplier.TestCase( - List.of(new 
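
Per the evaluator names asserted above, both functions first reduce each possibly-multivalued field, MvMax for GREATEST and MvMin for LEAST, then compare across fields. A plain-JDK sketch of that two-stage reduction over strings:

import java.util.List;

public class GreatestLeastSketch {
    public static void main(String[] args) {
        List<List<String>> fields = List.of(List.of("a", "c"), List.of("b"));
        String greatest = fields.stream()
            .map(f -> f.stream().max(String::compareTo).orElseThrow()) // MvMax per field
            .max(String::compareTo)
            .orElseThrow();
        String least = fields.stream()
            .map(f -> f.stream().min(String::compareTo).orElseThrow()) // MvMin per field
            .min(String::compareTo)
            .orElseThrow();
        System.out.println(greatest + " " + least); // c a
    }
}
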
TestCaseSupplier.TypedData(input, DataTypes.KEYWORD, "string")), + List.of(new TestCaseSupplier.TypedData(input, DataType.KEYWORD, "string")), "FromBase64Evaluator[field=Attribute[channel=0]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(Base64.getDecoder().decode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT), () -> { BytesRef input = new BytesRef(randomAlphaOfLength(54)); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(input, DataTypes.TEXT, "string")), + List.of(new TestCaseSupplier.TypedData(input, DataType.TEXT, "string")), "FromBase64Evaluator[field=Attribute[channel=0]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(Base64.getDecoder().decode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java index fe25e65befab5..dd35e04708c9f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -35,22 +35,22 @@ public ToBase64Tests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD), () -> { - BytesRef input = (BytesRef) randomLiteral(DataTypes.KEYWORD).value(); + suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD), () -> { + BytesRef input = (BytesRef) randomLiteral(DataType.KEYWORD).value(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(input, DataTypes.KEYWORD, "string")), + List.of(new TestCaseSupplier.TypedData(input, DataType.KEYWORD, "string")), "ToBase64Evaluator[field=Attribute[channel=0]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(Base64.getEncoder().encode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT), () -> { - BytesRef input = (BytesRef) randomLiteral(DataTypes.TEXT).value(); + suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT), () -> { + BytesRef input = (BytesRef) randomLiteral(DataType.TEXT).value(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(input, DataTypes.TEXT, "string")), + List.of(new TestCaseSupplier.TypedData(input, DataType.TEXT, "string")), "ToBase64Evaluator[field=Attribute[channel=0]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(Base64.getEncoder().encode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) ); })); diff --git 
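
The FROM_BASE64 and TO_BASE64 expectations above both delegate to java.util.Base64 over the UTF-8 bytes of the input, always yielding a KEYWORD result. A round-trip check:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64RoundTrip {
    public static void main(String[] args) {
        byte[] input = "elastic".getBytes(StandardCharsets.UTF_8);
        byte[] encoded = Base64.getEncoder().encode(input);
        byte[] decoded = Base64.getDecoder().decode(encoded);
        System.out.println(new String(encoded, StandardCharsets.UTF_8)); // ZWxhc3RpYw==
        System.out.println(new String(decoded, StandardCharsets.UTF_8)); // elastic
    }
}
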
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java index 8c37f7055066f..3a25ad6b56d0c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -34,12 +34,12 @@ public static Iterable parameters() { final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryBoolean(suppliers, read, DataTypes.BOOLEAN, b -> b, emptyList()); + TestCaseSupplier.forUnaryBoolean(suppliers, read, DataType.BOOLEAN, b -> b, emptyList()); TestCaseSupplier.forUnaryInt( suppliers, "ToBooleanFromIntEvaluator[field=" + read + "]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, i -> i != 0, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -48,7 +48,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "ToBooleanFromLongEvaluator[field=" + read + "]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, l -> l != 0, Long.MIN_VALUE, Long.MAX_VALUE, @@ -57,7 +57,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "ToBooleanFromUnsignedLongEvaluator[field=" + read + "]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, ul -> ul.compareTo(BigInteger.ZERO) != 0, BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -66,7 +66,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "ToBooleanFromDoubleEvaluator[field=" + read + "]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, d -> d != 0d, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, @@ -75,7 +75,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryStrings( suppliers, "ToBooleanFromStringEvaluator[field=" + read + "]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, bytesRef -> String.valueOf(bytesRef).toLowerCase(Locale.ROOT).equals("true"), emptyList() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 6e63a14a0ad7c..b520e559c45d7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ 
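
The conversion rules the ToBoolean cases above encode: any non-zero numeric is true, and a string is true only if it equals "true" ignoring case. Runnable:

import java.math.BigInteger;
import java.util.Locale;

public class ToBooleanRules {
    public static void main(String[] args) {
        System.out.println(-3L != 0);                                        // true
        System.out.println(0.0d != 0d);                                      // false
        System.out.println(BigInteger.ZERO.compareTo(BigInteger.ZERO) != 0); // false
        System.out.println("TRUE".toLowerCase(Locale.ROOT).equals("true"));  // true
        System.out.println("yes".toLowerCase(Locale.ROOT).equals("true"));   // false
    }
}
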
-40,12 +39,12 @@ public static Iterable parameters() { final Function evaluatorName = s -> "ToCartesianPoint" + s + "Evaluator[field=" + attribute + "]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, DataTypes.CARTESIAN_POINT, v -> v, List.of()); + TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, DataType.CARTESIAN_POINT, v -> v, List.of()); // random strings that don't look like a cartesian point TestCaseSupplier.forUnaryStrings( suppliers, evaluatorName.apply("FromString"), - DataTypes.CARTESIAN_POINT, + DataType.CARTESIAN_POINT, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); @@ -56,7 +55,7 @@ public static Iterable parameters() { } ); // strings that are cartesian point representations - for (DataType dt : List.of(DataTypes.KEYWORD, DataTypes.TEXT)) { + for (DataType dt : List.of(DataType.KEYWORD, DataType.TEXT)) { TestCaseSupplier.unary( suppliers, evaluatorName.apply("FromString"), @@ -67,7 +66,7 @@ public static Iterable parameters() { dt ) ), - DataTypes.CARTESIAN_POINT, + DataType.CARTESIAN_POINT, bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java index 9ffb8bd82a0a8..9eb1155a209a1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -40,13 +39,13 @@ public static Iterable parameters() { final Function evaluatorName = s -> "ToCartesianShape" + s + "Evaluator[field=" + attribute + "]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, DataTypes.CARTESIAN_SHAPE, v -> v, List.of()); - TestCaseSupplier.forUnaryCartesianShape(suppliers, attribute, DataTypes.CARTESIAN_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, DataType.CARTESIAN_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryCartesianShape(suppliers, attribute, DataType.CARTESIAN_SHAPE, v -> v, List.of()); // random strings that don't look like a cartesian shape TestCaseSupplier.forUnaryStrings( suppliers, evaluatorName.apply("FromString"), - DataTypes.CARTESIAN_SHAPE, + DataType.CARTESIAN_SHAPE, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); @@ -57,7 +56,7 @@ public static Iterable parameters() { } ); // strings that are cartesian_shape representations - for (DataType dt : List.of(DataTypes.KEYWORD, DataTypes.TEXT)) { + for (DataType dt : List.of(DataType.KEYWORD, DataType.TEXT)) { TestCaseSupplier.unary( suppliers, 
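
The FromString cases above all follow one pattern: a string that parses as WKT is converted to WKB, and anything else becomes null plus a warning. A toy stand-in parser keeps this runnable without the SpatialCoordinateTypes dependency (the tests themselves delegate to GEO.wktToWkb / CARTESIAN.wktToWkb):

import java.nio.charset.StandardCharsets;
import java.util.Optional;

public class WktFromStringPattern {
    static Optional<byte[]> wktToWkb(String wkt) {
        // illustrative only: accept bare POINTs, reject everything else
        return wkt.startsWith("POINT (")
            ? Optional.of(wkt.getBytes(StandardCharsets.UTF_8))
            : Optional.empty();
    }

    public static void main(String[] args) {
        System.out.println(wktToWkb("POINT (12.5 -3.25)").isPresent()); // true -> converted
        System.out.println(wktToWkb("not a shape").isPresent());        // false -> null + warning row
    }
}
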
evaluatorName.apply("FromString"), @@ -68,7 +67,7 @@ public static Iterable parameters() { dt ) ), - DataTypes.CARTESIAN_SHAPE, + DataType.CARTESIAN_SHAPE, bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 4660a9a1c86a2..0ef931710422e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -36,22 +36,22 @@ public static Iterable parameters() { final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.DATETIME, Instant::toEpochMilli, emptyList()); + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataType.DATETIME, Instant::toEpochMilli, emptyList()); TestCaseSupplier.forUnaryInt( suppliers, "ToLongFromIntEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, i -> ((Integer) i).longValue(), Integer.MIN_VALUE, Integer.MAX_VALUE, emptyList() ); - TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.DATETIME, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, emptyList()); + TestCaseSupplier.forUnaryLong(suppliers, read, DataType.DATETIME, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, emptyList()); TestCaseSupplier.forUnaryUnsignedLong( suppliers, "ToLongFromUnsignedLongEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, BigInteger::longValueExact, BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), @@ -60,7 +60,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "ToLongFromUnsignedLongEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, bi -> null, BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), UNSIGNED_LONG_MAX, @@ -72,7 +72,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "ToLongFromDoubleEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, d -> null, Double.NEGATIVE_INFINITY, -9.223372036854777E18, // a "convenient" value smaller than `(double) Long.MIN_VALUE` (== ...776E18) @@ -84,7 +84,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "ToLongFromDoubleEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, d -> null, 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) Double.POSITIVE_INFINITY, @@ -96,7 +96,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryStrings( suppliers, "ToDatetimeFromStringEvaluator[field=" + read + "]", - DataTypes.DATETIME, + DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -114,10 +114,10 @@ public static Iterable parameters() { "", // millis past "0001-01-01T00:00:00.000Z" to match the default formatter () -> new BytesRef(randomDateString(-62135596800000L, 253402300799999L)), - DataTypes.KEYWORD + DataType.KEYWORD ) ), - DataTypes.DATETIME, + DataType.DATETIME, bytesRef -> DEFAULT_DATE_TIME_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), emptyList() ); @@ -129,10 +129,10 @@ public static Iterable parameters() { "", // millis before "0001-01-01T00:00:00.000Z" () -> new BytesRef(randomDateString(Long.MIN_VALUE, -62135596800001L)), - DataTypes.KEYWORD + DataType.KEYWORD ) ), - DataTypes.DATETIME, + DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -149,10 +149,10 @@ public static Iterable parameters() { "", // millis before "0001-01-01T00:00:00.000Z" () -> new BytesRef(randomDateString(253402300800000L, Long.MAX_VALUE)), - DataTypes.KEYWORD + DataType.KEYWORD ) ), - DataTypes.DATETIME, + DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java index 2368d52659898..b7cb03879fd6f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -36,7 +36,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("ToDoubleFromIntEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, Math::toDegrees, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -45,7 +45,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("ToDoubleFromLongEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, Math::toDegrees, Long.MIN_VALUE, Long.MAX_VALUE, @@ -54,13 +54,13 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.toDegrees(ul.doubleValue()), BigInteger.ZERO, UNSIGNED_LONG_MAX, List.of() ); - TestCaseSupplier.forUnaryDouble(suppliers, "ToDegreesEvaluator[field=Attribute[channel=0]]", DataTypes.DOUBLE, d -> { + TestCaseSupplier.forUnaryDouble(suppliers, "ToDegreesEvaluator[field=Attribute[channel=0]]", DataType.DOUBLE, d -> { double deg = Math.toDegrees(d); return Double.isNaN(deg) || Double.isInfinite(deg) ? 
null : deg; }, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, d -> { @@ -76,12 +76,12 @@ public static Iterable parameters() { suppliers, "ToDegreesEvaluator[field=Attribute[channel=0]]", List.of( - new TestCaseSupplier.TypedDataSupplier("Double.MAX_VALUE", () -> Double.MAX_VALUE, DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("-Double.MAX_VALUE", () -> -Double.MAX_VALUE, DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("Double.POSITIVE_INFINITY", () -> Double.POSITIVE_INFINITY, DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("Double.NEGATIVE_INFINITY", () -> Double.NEGATIVE_INFINITY, DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("Double.MAX_VALUE", () -> Double.MAX_VALUE, DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("-Double.MAX_VALUE", () -> -Double.MAX_VALUE, DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("Double.POSITIVE_INFINITY", () -> Double.POSITIVE_INFINITY, DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("Double.NEGATIVE_INFINITY", () -> Double.NEGATIVE_INFINITY, DataType.DOUBLE) ), - DataTypes.DOUBLE, + DataType.DOUBLE, d -> null, d -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index 24f7952c67f6a..6438a8422a664 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -41,23 +41,23 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, read, - DataTypes.DOUBLE, + DataType.DOUBLE, d -> d, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, List.of() ); - TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.DOUBLE, b -> b ? 1d : 0d, List.of()); + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataType.DOUBLE, b -> b ? 
1d : 0d, List.of()); TestCaseSupplier.forUnaryDatetime( suppliers, evaluatorName.apply("Long"), - DataTypes.DOUBLE, + DataType.DOUBLE, i -> (double) i.toEpochMilli(), List.of() ); // random strings that don't look like a double - TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.DOUBLE, bytesRef -> null, bytesRef -> { + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataType.DOUBLE, bytesRef -> null, bytesRef -> { var exception = expectThrows( InvalidArgumentException.class, () -> EsqlDataTypeConverter.stringToDouble(bytesRef.utf8ToString()) @@ -70,7 +70,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), - DataTypes.DOUBLE, + DataType.DOUBLE, BigInteger::doubleValue, BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -79,7 +79,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.DOUBLE, + DataType.DOUBLE, l -> (double) l, Long.MIN_VALUE, Long.MAX_VALUE, @@ -88,7 +88,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("Int"), - DataTypes.DOUBLE, + DataType.DOUBLE, i -> (double) i, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -105,11 +105,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.DOUBLE, + DataType.DOUBLE, bytesRef -> Double.valueOf(((BytesRef) bytesRef).utf8ToString()), List.of() ); @@ -117,24 +117,24 @@ public static Iterable parameters() { TestCaseSupplier.unary( suppliers, "Attribute[channel=0]", - List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomDouble, DataTypes.COUNTER_DOUBLE)), - DataTypes.DOUBLE, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomDouble, DataType.COUNTER_DOUBLE)), + DataType.DOUBLE, l -> l, List.of() ); TestCaseSupplier.unary( suppliers, evaluatorName.apply("Integer"), - List.of(new TestCaseSupplier.TypedDataSupplier("counter", () -> randomInt(1000), DataTypes.COUNTER_INTEGER)), - DataTypes.DOUBLE, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", () -> randomInt(1000), DataType.COUNTER_INTEGER)), + DataType.DOUBLE, l -> ((Integer) l).doubleValue(), List.of() ); TestCaseSupplier.unary( suppliers, evaluatorName.apply("Long"), - List.of(new TestCaseSupplier.TypedDataSupplier("counter", () -> randomLongBetween(1, 1000), DataTypes.COUNTER_LONG)), - DataTypes.DOUBLE, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", () -> randomLongBetween(1, 1000), DataType.COUNTER_LONG)), + DataType.DOUBLE, l -> ((Long) l).doubleValue(), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 4b7d78980a440..e1af4441b3c5f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -40,9 +39,9 @@ public static Iterable parameters() { final Function evaluatorName = s -> "ToGeoPoint" + s + "Evaluator[field=" + attribute + "]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, DataTypes.GEO_POINT, v -> v, List.of()); + TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, DataType.GEO_POINT, v -> v, List.of()); // random strings that don't look like a geo point - TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), DataTypes.GEO_POINT, bytesRef -> null, bytesRef -> { + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), DataType.GEO_POINT, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -50,7 +49,7 @@ public static Iterable parameters() { ); }); // strings that are geo point representations - for (DataType dt : List.of(DataTypes.KEYWORD, DataTypes.TEXT)) { + for (DataType dt : List.of(DataType.KEYWORD, DataType.TEXT)) { TestCaseSupplier.unary( suppliers, evaluatorName.apply("FromString"), @@ -61,7 +60,7 @@ public static Iterable parameters() { dt ) ), - DataTypes.GEO_POINT, + DataType.GEO_POINT, bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java index 0b1f01ee24183..291708e94888c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -40,10 +39,10 @@ public static Iterable parameters() { final Function evaluatorName = s -> "ToGeoShape" + s + "Evaluator[field=" + attribute + "]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, DataTypes.GEO_SHAPE, v -> v, List.of()); - TestCaseSupplier.forUnaryGeoShape(suppliers, attribute, DataTypes.GEO_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, DataType.GEO_SHAPE, v -> v, List.of()); + TestCaseSupplier.forUnaryGeoShape(suppliers, attribute, DataType.GEO_SHAPE, v -> v, List.of()); // random strings that don't look like a geo shape - TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), DataTypes.GEO_SHAPE, bytesRef -> null, bytesRef -> { + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), 
DataType.GEO_SHAPE, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -51,7 +50,7 @@ public static Iterable parameters() { ); }); // strings that are geo_shape representations - for (DataType dt : List.of(DataTypes.KEYWORD, DataTypes.TEXT)) { + for (DataType dt : List.of(DataType.KEYWORD, DataType.TEXT)) { TestCaseSupplier.unary( suppliers, evaluatorName.apply("FromString"), @@ -62,7 +61,7 @@ public static Iterable parameters() { dt ) ), - DataTypes.GEO_SHAPE, + DataType.GEO_SHAPE, bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java index 8f106db3e97a0..415d9ea0a4a70 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -38,13 +38,13 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); // convert from IP to IP - TestCaseSupplier.forUnaryIp(suppliers, read, DataTypes.IP, v -> v, List.of()); + TestCaseSupplier.forUnaryIp(suppliers, read, DataType.IP, v -> v, List.of()); // convert random string (i.e. not an IP representation) to IP `null`, with warnings. TestCaseSupplier.forUnaryStrings( suppliers, stringEvaluator, - DataTypes.IP, + DataType.IP, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -57,7 +57,7 @@ public static Iterable parameters() { suppliers, stringEvaluator, validIPsAsStrings(), - DataTypes.IP, + DataType.IP, bytesRef -> parseIP(((BytesRef) bytesRef).utf8ToString()), emptyList() ); @@ -73,16 +73,16 @@ protected Expression build(Source source, List args) { private static List validIPsAsStrings() { return List.of( - new TestCaseSupplier.TypedDataSupplier("<127.0.0.1 ip>", () -> new BytesRef("127.0.0.1"), DataTypes.KEYWORD), + new TestCaseSupplier.TypedDataSupplier("<127.0.0.1 ip>", () -> new BytesRef("127.0.0.1"), DataType.KEYWORD), new TestCaseSupplier.TypedDataSupplier( "", () -> new BytesRef(NetworkAddress.format(ESTestCase.randomIp(true))), - DataTypes.KEYWORD + DataType.KEYWORD ), new TestCaseSupplier.TypedDataSupplier( "", () -> new BytesRef(NetworkAddress.format(ESTestCase.randomIp(false))), - DataTypes.TEXT + DataType.TEXT ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index ffac75f1ac338..83bdaf2f2d304 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -38,16 +38,16 @@ public static Iterable parameters() { Function evaluatorName = s -> "ToIntegerFrom" + s + "Evaluator[field=" + read + "]"; List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryInt(suppliers, read, DataTypes.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); + TestCaseSupplier.forUnaryInt(suppliers, read, DataType.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); - TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.INTEGER, b -> b ? 1 : 0, List.of()); + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataType.INTEGER, b -> b ? 1 : 0, List.of()); // datetimes that fall within Integer's range TestCaseSupplier.unary( suppliers, evaluatorName.apply("Long"), dateCases(0, Integer.MAX_VALUE), - DataTypes.INTEGER, + DataType.INTEGER, l -> ((Long) l).intValue(), List.of() ); @@ -56,7 +56,7 @@ public static Iterable parameters() { suppliers, evaluatorName.apply("Long"), dateCases(Integer.MAX_VALUE + 1L, Long.MAX_VALUE), - DataTypes.INTEGER, + DataType.INTEGER, l -> null, l -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -67,7 +67,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryStrings( suppliers, evaluatorName.apply("String"), - DataTypes.INTEGER, + DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -80,7 +80,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.INTEGER, + DataType.INTEGER, d -> safeToInt(Math.round(d)), Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -90,7 +90,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.INTEGER, + DataType.INTEGER, d -> null, Double.NEGATIVE_INFINITY, Integer.MIN_VALUE - 1d, @@ -103,7 +103,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.INTEGER, + DataType.INTEGER, d -> null, Integer.MAX_VALUE + 1d, Double.POSITIVE_INFINITY, @@ -117,7 +117,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), - DataTypes.INTEGER, + DataType.INTEGER, BigInteger::intValue, BigInteger.ZERO, BigInteger.valueOf(Integer.MAX_VALUE), @@ -127,7 +127,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), - DataTypes.INTEGER, + DataType.INTEGER, ul -> null, BigInteger.valueOf(Integer.MAX_VALUE).add(BigInteger.ONE), UNSIGNED_LONG_MAX, @@ -142,7 +142,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.INTEGER, + DataType.INTEGER, l -> (int) l, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -152,7 +152,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.INTEGER, + DataType.INTEGER, l -> null, Long.MIN_VALUE, Integer.MIN_VALUE - 1L, @@ -166,7 +166,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.INTEGER, + DataType.INTEGER, l -> null, Integer.MAX_VALUE + 1L, Long.MAX_VALUE, @@ -186,11 +186,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.INTEGER, + DataType.INTEGER, bytesRef -> Integer.valueOf(((BytesRef) bytesRef).utf8ToString()), List.of() ); @@ -204,11 +204,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.INTEGER, + DataType.INTEGER, bytesRef -> safeToInt(Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString()))), List.of() ); @@ -222,11 +222,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.INTEGER, + DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -245,11 +245,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.INTEGER, + DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -262,8 +262,8 @@ public static Iterable parameters() { TestCaseSupplier.unary( suppliers, "Attribute[channel=0]", - List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomInt, DataTypes.COUNTER_INTEGER)), - DataTypes.INTEGER, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomInt, DataType.COUNTER_INTEGER)), + DataType.INTEGER, l -> l, List.of() ); @@ -279,13 +279,13 @@ protected Expression build(Source source, List args) { private static List dateCases(long min, long max) { List dataSuppliers = new ArrayList<>(2); if (min == 0L) { - dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME)); + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataType.DATETIME)); } if (max <= Integer.MAX_VALUE) { - dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-25T20:31:23.647Z>", () -> 2147483647L, DataTypes.DATETIME)); + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-25T20:31:23.647Z>", () -> 2147483647L, DataType.DATETIME)); } dataSuppliers.add( - new TestCaseSupplier.TypedDataSupplier("", () -> ESTestCase.randomLongBetween(min, max), DataTypes.DATETIME) + new TestCaseSupplier.TypedDataSupplier("", () -> ESTestCase.randomLongBetween(min, max), DataType.DATETIME) ); return dataSuppliers; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index adb2d68cf526a..92b0bb192e2aa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -37,17 +37,17 @@ public static Iterable parameters() { Function evaluatorName = s -> "ToLongFrom" + s + "Evaluator[field=" + read + "]"; List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); + TestCaseSupplier.forUnaryLong(suppliers, read, DataType.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); - TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.LONG, b -> b ? 1L : 0L, List.of()); + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataType.LONG, b -> b ? 1L : 0L, List.of()); // datetimes - TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.LONG, Instant::toEpochMilli, List.of()); + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataType.LONG, Instant::toEpochMilli, List.of()); // random strings that don't look like a long TestCaseSupplier.forUnaryStrings( suppliers, evaluatorName.apply("String"), - DataTypes.LONG, + DataType.LONG, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -60,7 +60,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.LONG, + DataType.LONG, Math::round, Long.MIN_VALUE, Long.MAX_VALUE, @@ -70,7 +70,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.LONG, + DataType.LONG, d -> null, Double.NEGATIVE_INFINITY, Long.MIN_VALUE - 1d, @@ -83,7 +83,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.LONG, + DataType.LONG, d -> null, Long.MAX_VALUE + 1d, Double.POSITIVE_INFINITY, @@ -97,7 +97,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), - DataTypes.LONG, + DataType.LONG, BigInteger::longValue, BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), @@ -106,7 +106,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), - DataTypes.LONG, + DataType.LONG, ul -> null, BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), UNSIGNED_LONG_MAX, @@ -121,7 +121,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("Int"), - DataTypes.LONG, + DataType.LONG, l -> (long) l, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -138,11 +138,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.LONG, + DataType.LONG, bytesRef -> Long.valueOf(((BytesRef) bytesRef).utf8ToString()), List.of() ); @@ -156,11 +156,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.LONG, + DataType.LONG, bytesRef -> Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString())), List.of() ); @@ -174,11 +174,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.LONG, + DataType.LONG, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -197,11 +197,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.LONG, + DataType.LONG, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", @@ -214,16 +214,16 @@ public static Iterable parameters() { TestCaseSupplier.unary( suppliers, "Attribute[channel=0]", - List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomNonNegativeLong, DataTypes.COUNTER_LONG)), - DataTypes.LONG, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomNonNegativeLong, DataType.COUNTER_LONG)), + DataType.LONG, l -> l, List.of() ); TestCaseSupplier.unary( suppliers, evaluatorName.apply("Integer"), - List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomInt, DataTypes.COUNTER_INTEGER)), - DataTypes.LONG, + List.of(new TestCaseSupplier.TypedDataSupplier("counter", ESTestCase::randomInt, DataType.COUNTER_INTEGER)), + DataType.LONG, l -> ((Integer) l).longValue(), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java index f2c3378a5408a..67951b46d03b5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -36,7 +36,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("ToDoubleFromIntEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, Math::toRadians, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -45,7 +45,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("ToDoubleFromLongEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, Math::toRadians, Long.MIN_VALUE, Long.MAX_VALUE, @@ -54,7 +54,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.toRadians(ul.doubleValue()), BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -63,7 +63,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "ToRadiansEvaluator[field=Attribute[channel=0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::toRadians, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 6e41930fce2e0..511df557ff842 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; 
+import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -40,7 +40,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "ToStringFromIntEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, i -> new BytesRef(Integer.toString(i)), Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -49,7 +49,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "ToStringFromLongEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, l -> new BytesRef(Long.toString(l)), Long.MIN_VALUE, Long.MAX_VALUE, @@ -58,7 +58,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "ToStringFromUnsignedLongEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, ul -> new BytesRef(ul.toString()), BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -67,7 +67,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "ToStringFromDoubleEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, d -> new BytesRef(Double.toString(d)), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, @@ -76,57 +76,57 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryBoolean( suppliers, "ToStringFromBooleanEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, b -> new BytesRef(b.toString()), List.of() ); TestCaseSupplier.forUnaryDatetime( suppliers, "ToStringFromDatetimeEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i.toEpochMilli())), List.of() ); TestCaseSupplier.forUnaryGeoPoint( suppliers, "ToStringFromGeoPointEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, wkb -> new BytesRef(GEO.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryCartesianPoint( suppliers, "ToStringFromCartesianPointEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryGeoShape( suppliers, "ToStringFromGeoShapeEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, wkb -> new BytesRef(GEO.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryCartesianShape( suppliers, "ToStringFromCartesianShapeEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), List.of() ); TestCaseSupplier.forUnaryIp( suppliers, "ToStringFromIPEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, ip -> new BytesRef(DocValueFormat.IP.format(ip)), List.of() ); - TestCaseSupplier.forUnaryStrings(suppliers, read, DataTypes.KEYWORD, bytesRef -> bytesRef, List.of()); + TestCaseSupplier.forUnaryStrings(suppliers, read, DataType.KEYWORD, bytesRef -> bytesRef, List.of()); TestCaseSupplier.forUnaryVersion( suppliers, "ToStringFromVersionEvaluator[field=" + read + "]", - DataTypes.KEYWORD, + DataType.KEYWORD, v -> new BytesRef(v.toString()), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java index e7e27e34e0a70..4182f99d316fc 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -42,7 +42,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, read, - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, n -> n, BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -52,7 +52,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryBoolean( suppliers, evaluatorName.apply("Boolean"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, b -> b ? BigInteger.ONE : BigInteger.ZERO, List.of() ); @@ -61,12 +61,12 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDatetime( suppliers, evaluatorName.apply("Long"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, instant -> BigInteger.valueOf(instant.toEpochMilli()), List.of() ); // random strings that don't look like an unsigned_long - TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.UNSIGNED_LONG, bytesRef -> null, bytesRef -> { + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataType.UNSIGNED_LONG, bytesRef -> null, bytesRef -> { // BigDecimal, used to parse unsigned_longs, will throw NFEs with different messages depending on empty string, first // non-number character after a number-looking prefix, or string starting with "e", maybe others -- safer to take // this shortcut here.
@@ -80,7 +80,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, d -> BigDecimal.valueOf(d).toBigInteger(), // note: not: new BigDecimal(d).toBigInteger 0d, UNSIGNED_LONG_MAX_AS_DOUBLE, @@ -90,7 +90,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, d -> null, Double.NEGATIVE_INFINITY, -1d, @@ -103,7 +103,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, evaluatorName.apply("Double"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, d -> null, UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, Double.POSITIVE_INFINITY, @@ -117,7 +117,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, BigInteger::valueOf, 0L, Long.MAX_VALUE, @@ -127,7 +127,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, evaluatorName.apply("Long"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, unused -> null, Long.MIN_VALUE, -1L, @@ -141,7 +141,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("Int"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, BigInteger::valueOf, 0, Integer.MAX_VALUE, @@ -151,7 +151,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, evaluatorName.apply("Int"), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, unused -> null, Integer.MIN_VALUE, -1, @@ -171,11 +171,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, bytesRef -> safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString()), List.of() ); @@ -189,11 +189,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, bytesRef -> safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString()), List.of() ); @@ -207,11 +207,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", @@ -230,11 +230,11 @@ public static Iterable parameters() { tds -> new TestCaseSupplier.TypedDataSupplier( tds.name() + "as string", () -> new BytesRef(tds.supplier().get().toString()), - DataTypes.KEYWORD + DataType.KEYWORD ) ) .toList(), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index 16eac2a6f1712..a397de64aeea8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.versionfield.Version; @@ -36,7 +35,7 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); // Converting and IP to an IP doesn't change anything. Everything should succeed. - TestCaseSupplier.forUnaryVersion(suppliers, read, DataTypes.VERSION, Version::toBytesRef, List.of()); + TestCaseSupplier.forUnaryVersion(suppliers, read, DataType.VERSION, Version::toBytesRef, List.of()); // None of the random strings ever look like versions so they should all become "invalid" versions: // https://github.com/elastic/elasticsearch/issues/98989 @@ -44,7 +43,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryStrings( suppliers, stringEvaluator, - DataTypes.VERSION, + DataType.VERSION, bytesRef -> new Version(bytesRef.utf8ToString()).toBytesRef(), List.of() ); @@ -55,7 +54,7 @@ public static Iterable parameters() { suppliers, read, TestCaseSupplier.versionCases(inputType.typeName() + " "), - DataTypes.VERSION, + DataType.VERSION, bytesRef -> new Version((BytesRef) bytesRef).toBytesRef(), List.of() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index f2f5846bf0901..89cfda5c4bce5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -39,66 +39,66 @@ public static Iterable parameters() { List.of( new TestCaseSupplier( "Date Diff In Seconds - OK", - List.of(DataTypes.KEYWORD, DataTypes.DATETIME, DataTypes.DATETIME), + List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.KEYWORD, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new 
TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(88170) ) ), new TestCaseSupplier( "Date Diff In Seconds with text- OK", - List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.DATETIME), + List.of(DataType.TEXT, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(88170) ) ), new TestCaseSupplier( "Date Diff Error Type unit", - List.of(DataTypes.INTEGER, DataTypes.DATETIME, DataTypes.DATETIME), + List.of(DataType.INTEGER, DataType.DATETIME, DataType.DATETIME), () -> TestCaseSupplier.TestCase.typeError( List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.INTEGER, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataType.INTEGER, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "first argument of [] must be [string], found value [unit] type [integer]" ) ), new TestCaseSupplier( "Date Diff Error Type startTimestamp", - List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.DATETIME), + List.of(DataType.TEXT, DataType.INTEGER, DataType.DATETIME), () -> TestCaseSupplier.TestCase.typeError( List.of( - new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.INTEGER, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.INTEGER, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "second argument of [] must be [datetime], found value [startTimestamp] type [integer]" ) ), new TestCaseSupplier( "Date Diff Error Type endTimestamp", - List.of(DataTypes.TEXT, DataTypes.DATETIME, 
DataTypes.INTEGER), + List.of(DataType.TEXT, DataType.DATETIME, DataType.INTEGER), () -> TestCaseSupplier.TestCase.typeError( List.of( - new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.INTEGER, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.INTEGER, "endTimestamp") ), "third argument of [] must be [datetime], found value [endTimestamp] type [integer]" ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index e2dc759b96832..221f3fd51a545 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; @@ -45,39 +45,39 @@ public static Iterable parameters() { true, List.of( new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + List.of(DataType.KEYWORD, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.KEYWORD, "chrono"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataType.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "date") ), "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, + DataType.LONG, equalTo(2023L) ) ), new TestCaseSupplier( - List.of(DataTypes.TEXT, DataTypes.DATETIME), + List.of(DataType.TEXT, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.TEXT, "chrono"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataType.TEXT, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "date") ), "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, + DataType.LONG, equalTo(2023L) ) ), new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + List.of(DataType.KEYWORD, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("not a unit"), DataTypes.KEYWORD, "chrono"), - new TestCaseSupplier.TypedData(0L, DataTypes.DATETIME, 
"date") + new TestCaseSupplier.TypedData(new BytesRef("not a unit"), DataType.KEYWORD, "chrono"), + new TestCaseSupplier.TypedData(0L, DataType.DATETIME, "date") ), "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, + DataType.LONG, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning( @@ -98,8 +98,8 @@ public void testAllChronoFields() { for (ChronoField value : ChronoField.values()) { DateExtract instance = new DateExtract( Source.EMPTY, - new Literal(Source.EMPTY, new BytesRef(value.name()), DataTypes.KEYWORD), - new Literal(Source.EMPTY, epochMilli, DataTypes.DATETIME), + new Literal(Source.EMPTY, new BytesRef(value.name()), DataType.KEYWORD), + new Literal(Source.EMPTY, epochMilli, DataType.DATETIME), EsqlTestUtils.TEST_CFG ); @@ -119,8 +119,8 @@ public void testInvalidChrono() { () -> evaluator( new DateExtract( Source.EMPTY, - new Literal(Source.EMPTY, new BytesRef(chrono), DataTypes.KEYWORD), - field("str", DataTypes.DATETIME), + new Literal(Source.EMPTY, new BytesRef(chrono), DataType.KEYWORD), + field("str", DataType.DATETIME), null ) ).get(driverContext) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java index 26f0e88ff2fce..6e1b5caa710e1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; @@ -37,26 +37,26 @@ public static Iterable parameters() { true, List.of( new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + List.of(DataType.KEYWORD, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.KEYWORD, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataType.KEYWORD, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "val") ), "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(BytesRefs.toBytesRef("2023")) ) ), new TestCaseSupplier( - List.of(DataTypes.TEXT, DataTypes.DATETIME), + List.of(DataType.TEXT, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.TEXT, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataType.TEXT, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataType.DATETIME, "val") ), 
"DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(BytesRefs.toBytesRef("2023")) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 19c5ca0de72f5..8906994c6d7eb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -42,66 +42,66 @@ public static Iterable parameters() { List.of( new TestCaseSupplier( "Basic Case", - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + List.of(DataType.KEYWORD, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.KEYWORD, "first"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.KEYWORD, "second") + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataType.KEYWORD, "first"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataType.KEYWORD, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(1683244800000L) ) ), new TestCaseSupplier( "With Text", - List.of(DataTypes.KEYWORD, DataTypes.TEXT), + List.of(DataType.KEYWORD, DataType.TEXT), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.KEYWORD, "first"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "second") + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataType.KEYWORD, "first"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataType.TEXT, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(1683244800000L) ) ), new TestCaseSupplier( "With Both Text", - List.of(DataTypes.TEXT, DataTypes.TEXT), + List.of(DataType.TEXT, DataType.TEXT), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.TEXT, "first"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "second") + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataType.TEXT, "first"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataType.TEXT, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(1683244800000L) ) ), new TestCaseSupplier( "With keyword", - List.of(DataTypes.TEXT, DataTypes.KEYWORD), + List.of(DataType.TEXT, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), 
DataTypes.TEXT, "first"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.KEYWORD, "second") + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataType.TEXT, "first"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataType.KEYWORD, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(1683244800000L) ) ), new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + List.of(DataType.KEYWORD, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("not a format"), DataTypes.KEYWORD, "first"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.KEYWORD, "second") + new TestCaseSupplier.TypedData(new BytesRef("not a format"), DataType.KEYWORD, "first"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataType.KEYWORD, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning( @@ -114,15 +114,15 @@ public static Iterable parameters() { ) ), new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), + List.of(DataType.KEYWORD, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.KEYWORD, "first"), - new TestCaseSupplier.TypedData(new BytesRef("not a date"), DataTypes.KEYWORD, "second") + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataType.KEYWORD, "first"), + new TestCaseSupplier.TypedData(new BytesRef("not a date"), DataType.KEYWORD, "second") ), "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, + DataType.DATETIME, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning( @@ -144,8 +144,8 @@ public void testInvalidPattern() { () -> evaluator( new DateParse( Source.EMPTY, - new Literal(Source.EMPTY, new BytesRef(pattern), DataTypes.KEYWORD), - field("str", DataTypes.KEYWORD) + new Literal(Source.EMPTY, new BytesRef(pattern), DataType.KEYWORD), + field("str", DataType.KEYWORD) ) ).get(driverContext) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index f33f8d4ba1041..b627d7cd88908 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Rounding; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -139,14 +139,14 @@ public void testDateTruncFunction() { private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { return new TestCaseSupplier( - List.of(DataTypes.DATE_PERIOD, DataTypes.DATETIME), + List.of(DataType.DATE_PERIOD, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(period, DataTypes.DATE_PERIOD, "interval"), - new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + new TestCaseSupplier.TypedData(period, DataType.DATE_PERIOD, "interval"), + new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") ), "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(toMillis(expectedDate)) ) ); @@ -154,21 +154,21 @@ private static TestCaseSupplier ofDatePeriod(Period period, long value, String e private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { return new TestCaseSupplier( - List.of(DataTypes.TIME_DURATION, DataTypes.DATETIME), + List.of(DataType.TIME_DURATION, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(duration, DataTypes.TIME_DURATION, "interval"), - new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + new TestCaseSupplier.TypedData(duration, DataType.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(value, DataType.DATETIME, "date") ), "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(toMillis(expectedDate)) ) ); } private static TestCaseSupplier randomSecond() { - return new TestCaseSupplier("random second", List.of(DataTypes.TIME_DURATION, DataTypes.DATETIME), () -> { + return new TestCaseSupplier("random second", List.of(DataType.TIME_DURATION, DataType.DATETIME), () -> { String dateFragment = randomIntBetween(2000, 2050) + "-" + pad(randomIntBetween(1, 12)) @@ -182,11 +182,11 @@ private static TestCaseSupplier randomSecond() { + pad(randomIntBetween(0, 59)); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(Duration.ofSeconds(1), 
DataTypes.TIME_DURATION, "interval"), - new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataTypes.DATETIME, "date") + new TestCaseSupplier.TypedData(Duration.ofSeconds(1), DataType.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataType.DATETIME, "date") ), "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(toMillis(dateFragment + ".00Z")) ); }); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java index c86c5938dc4ba..2aaca179b2bc4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -35,50 +35,50 @@ public static Iterable parameters() { var suppliers = List.of( new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.KEYWORD), + List.of(DataType.IP, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.KEYWORD, "cidrs") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataType.KEYWORD, "cidrs") ), "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(true) ) ), new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.TEXT), + List.of(DataType.IP, DataType.TEXT), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.TEXT, "cidrs") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataType.TEXT, "cidrs") ), "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(true) ) ), new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.KEYWORD), + List.of(DataType.IP, DataType.KEYWORD), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.KEYWORD, "cidrs") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataType.KEYWORD, "cidrs") ), 
"CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(false) ) ), new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.TEXT), + List.of(DataType.IP, DataType.TEXT), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.TEXT, "cidrs") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataType.TEXT, "cidrs") ), "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(false) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java index e46eaea849bb5..063a057134d7e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -35,19 +35,19 @@ public static Iterable parameters() { var suppliers = List.of( // V4 new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.INTEGER, DataTypes.INTEGER), + List.of(DataType.IP, DataType.INTEGER, DataType.INTEGER), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("1.2.3.4"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(24, DataTypes.INTEGER, "prefixLengthV4"), - new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 128), DataTypes.INTEGER, "prefixLengthV6") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("1.2.3.4"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(24, DataType.INTEGER, "prefixLengthV4"), + new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 128), DataType.INTEGER, "prefixLengthV6") ), "IpPrefixEvaluator[ip=Attribute[channel=0], prefixLengthV4=Attribute[channel=1], prefixLengthV6=Attribute[channel=2]]", - DataTypes.IP, + DataType.IP, equalTo(EsqlDataTypeConverter.stringToIP("1.2.3.0")) ) ), - new TestCaseSupplier(List.of(DataTypes.IP, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + new TestCaseSupplier(List.of(DataType.IP, DataType.INTEGER, DataType.INTEGER), () -> { var randomIp = randomIp(true); var randomPrefix = randomIntBetween(0, 32); var cidrString = InetAddresses.toCidrString(randomIp, randomPrefix); @@ -59,31 +59,31 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(ipParameter, DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(randomPrefix, DataTypes.INTEGER, "prefixLengthV4"), - new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 128), DataTypes.INTEGER, 
"prefixLengthV6") + new TestCaseSupplier.TypedData(ipParameter, DataType.IP, "ip"), + new TestCaseSupplier.TypedData(randomPrefix, DataType.INTEGER, "prefixLengthV4"), + new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 128), DataType.INTEGER, "prefixLengthV6") ), "IpPrefixEvaluator[ip=Attribute[channel=0], prefixLengthV4=Attribute[channel=1], prefixLengthV6=Attribute[channel=2]]", - DataTypes.IP, + DataType.IP, equalTo(expectedPrefix) ); }), // V6 new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.INTEGER, DataTypes.INTEGER), + List.of(DataType.IP, DataType.INTEGER, DataType.INTEGER), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("::ff"), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 32), DataTypes.INTEGER, "prefixLengthV4"), - new TestCaseSupplier.TypedData(127, DataTypes.INTEGER, "prefixLengthV6") + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("::ff"), DataType.IP, "ip"), + new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 32), DataType.INTEGER, "prefixLengthV4"), + new TestCaseSupplier.TypedData(127, DataType.INTEGER, "prefixLengthV6") ), "IpPrefixEvaluator[ip=Attribute[channel=0], prefixLengthV4=Attribute[channel=1], prefixLengthV6=Attribute[channel=2]]", - DataTypes.IP, + DataType.IP, equalTo(EsqlDataTypeConverter.stringToIP("::fe")) ) ), - new TestCaseSupplier(List.of(DataTypes.IP, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + new TestCaseSupplier(List.of(DataType.IP, DataType.INTEGER, DataType.INTEGER), () -> { var randomIp = randomIp(false); var randomPrefix = randomIntBetween(0, 128); var cidrString = InetAddresses.toCidrString(randomIp, randomPrefix); @@ -95,12 +95,12 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(ipParameter, DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 32), DataTypes.INTEGER, "prefixLengthV4"), - new TestCaseSupplier.TypedData(randomPrefix, DataTypes.INTEGER, "prefixLengthV6") + new TestCaseSupplier.TypedData(ipParameter, DataType.IP, "ip"), + new TestCaseSupplier.TypedData(ESTestCase.randomIntBetween(0, 32), DataType.INTEGER, "prefixLengthV4"), + new TestCaseSupplier.TypedData(randomPrefix, DataType.INTEGER, "prefixLengthV6") ), "IpPrefixEvaluator[ip=Attribute[channel=0], prefixLengthV4=Attribute[channel=1], prefixLengthV6=Attribute[channel=2]]", - DataTypes.IP, + DataType.IP, equalTo(expectedPrefix) ); }) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java index 3c1b85d51515a..63642a01fa117 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java @@ -13,9 +13,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; 

 import java.math.BigInteger;
 import java.util.ArrayList;
@@ -24,43 +23,43 @@
 import static org.hamcrest.Matchers.equalTo;

-public class AbsTests extends AbstractScalarFunctionTestCase {
+public class AbsTests extends AbstractFunctionTestCase {
     @ParametersFactory
     public static Iterable parameters() {
         List suppliers = new ArrayList<>();
-        suppliers.add(new TestCaseSupplier(List.of(DataTypes.INTEGER), () -> {
+        suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER), () -> {
             int arg = randomInt();
             return new TestCaseSupplier.TestCase(
-                List.of(new TestCaseSupplier.TypedData(arg, DataTypes.INTEGER, "arg")),
+                List.of(new TestCaseSupplier.TypedData(arg, DataType.INTEGER, "arg")),
                 "AbsIntEvaluator[fieldVal=Attribute[channel=0]]",
-                DataTypes.INTEGER,
+                DataType.INTEGER,
                 equalTo(Math.abs(arg))
             );
         }));
         TestCaseSupplier.forUnaryUnsignedLong(
             suppliers,
             "Attribute[channel=0]",
-            DataTypes.UNSIGNED_LONG,
+            DataType.UNSIGNED_LONG,
             (n) -> n,
             BigInteger.ZERO,
             UNSIGNED_LONG_MAX,
             List.of()
         );
-        suppliers.add(new TestCaseSupplier(List.of(DataTypes.LONG), () -> {
+        suppliers.add(new TestCaseSupplier(List.of(DataType.LONG), () -> {
             long arg = randomLong();
             return new TestCaseSupplier.TestCase(
-                List.of(new TestCaseSupplier.TypedData(arg, DataTypes.LONG, "arg")),
+                List.of(new TestCaseSupplier.TypedData(arg, DataType.LONG, "arg")),
                 "AbsLongEvaluator[fieldVal=Attribute[channel=0]]",
-                DataTypes.LONG,
+                DataType.LONG,
                 equalTo(Math.abs(arg))
             );
         }));
-        suppliers.add(new TestCaseSupplier(List.of(DataTypes.DOUBLE), () -> {
+        suppliers.add(new TestCaseSupplier(List.of(DataType.DOUBLE), () -> {
             double arg = randomDouble();
             return new TestCaseSupplier.TestCase(
-                List.of(new TestCaseSupplier.TypedData(arg, DataTypes.DOUBLE, "arg")),
+                List.of(new TestCaseSupplier.TypedData(arg, DataType.DOUBLE, "arg")),
                 "AbsDoubleEvaluator[fieldVal=Attribute[channel=0]]",
-                DataTypes.DOUBLE,
+                DataType.DOUBLE,
                 equalTo(Math.abs(arg))
            );
         }));
@@ -75,14 +74,4 @@ public AbsTests(@Name("TestCase") Supplier testCaseSu
     protected Expression build(Source source, List args) {
         return new Abs(source, args.get(0));
     }
-
-    @Override
-    protected List argSpec() {
-        return List.of(required(numerics()));
-    }
-
-    @Override
-    protected DataType expectedType(List argTypes) {
-        return argTypes.get(0);
-    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java
index b9b672a2111c0..c4e614be94438 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.core.type.DataTypes;
 import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase;
 import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
 import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket;
@@ -44,7 +43,7 @@ public static Iterable parameters() {
             suppliers,
             "fixed date with period",
             () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00.00Z"),
-            DataTypes.DATE_PERIOD,
+            DataType.DATE_PERIOD,
             Period.ofYears(1),
             "[YEAR_OF_CENTURY in Z][fixed to midnight]"
         );
@@ -52,22 +51,22 @@ public static Iterable parameters() {
             suppliers,
             "fixed date with duration",
             () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"),
-            DataTypes.TIME_DURATION,
+            DataType.TIME_DURATION,
             Duration.ofDays(1L),
             "[86400000 in Z][fixed]"
         );
-        numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L);
-        numberCasesWithSpan(suppliers, "fixed long with span", DataTypes.LONG, () -> 100L);
-        numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100);
-        numberCasesWithSpan(suppliers, "fixed int with span", DataTypes.INTEGER, () -> 100);
-        numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0);
-        numberCasesWithSpan(suppliers, "fixed double with span", DataTypes.DOUBLE, () -> 100.);
+        numberCases(suppliers, "fixed long", DataType.LONG, () -> 100L);
+        numberCasesWithSpan(suppliers, "fixed long with span", DataType.LONG, () -> 100L);
+        numberCases(suppliers, "fixed int", DataType.INTEGER, () -> 100);
+        numberCasesWithSpan(suppliers, "fixed int with span", DataType.INTEGER, () -> 100);
+        numberCases(suppliers, "fixed double", DataType.DOUBLE, () -> 100.0);
+        numberCasesWithSpan(suppliers, "fixed double with span", DataType.DOUBLE, () -> 100.);
         // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters
         return parameterSuppliersFromTypedData(
             anyNullIsNull(
                 suppliers,
-                (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL
-                    ? DataTypes.NULL
+                (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataType.NULL
+                    ? DataType.NULL
                     : original.expectedType(),
                 (nullPosition, nullData, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]")
             )
@@ -75,22 +74,22 @@ public static Iterable parameters() {
     }

     // TODO once we cast above the functions we can drop these
-    private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataTypes.DATETIME };
+    private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataType.DATETIME };

     private static void dateCases(List suppliers, String name, LongSupplier date) {
         for (DataType fromType : DATE_BOUNDS_TYPE) {
             for (DataType toType : DATE_BOUNDS_TYPE) {
-                suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, DataTypes.INTEGER, fromType, toType), () -> {
+                suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATETIME, DataType.INTEGER, fromType, toType), () -> {
                     List args = new ArrayList<>();
-                    args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field"));
+                    args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATETIME, "field"));
                     // TODO more "from" and "to" and "buckets"
-                    args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral());
+                    args.add(new TestCaseSupplier.TypedData(50, DataType.INTEGER, "buckets").forceLiteral());
                     args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z"));
                     args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z"));
                     return new TestCaseSupplier.TestCase(
                         args,
                         "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]",
-                        DataTypes.DATETIME,
+                        DataType.DATETIME,
                         dateResultsMatcher(args)
                     );
                 }));
@@ -100,7 +99,7 @@ private static void dateCases(List suppliers, String name, Lon

     private static TestCaseSupplier.TypedData dateBound(String name, DataType type, String date) {
         Object value;
-        if (type == DataTypes.DATETIME) {
+        if (type == DataType.DATETIME) {
value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); } else { value = new BytesRef(date); @@ -116,36 +115,36 @@ private static void dateCasesWithSpan( Object span, String spanStr ) { - suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, spanType), () -> { + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATETIME, spanType), () -> { List args = new ArrayList<>(); - args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATETIME, "field")); args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); return new TestCaseSupplier.TestCase( args, "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", - DataTypes.DATETIME, + DataType.DATETIME, dateResultsMatcher(args) ); })); } - private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataType.INTEGER, DataType.LONG, DataType.DOUBLE }; private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { for (DataType fromType : NUMBER_BOUNDS_TYPES) { for (DataType toType : NUMBER_BOUNDS_TYPES) { - suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.INTEGER, fromType, toType), () -> { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataType.INTEGER, fromType, toType), () -> { List args = new ArrayList<>(); args.add(new TestCaseSupplier.TypedData(number.get(), "field")); // TODO more "from" and "to" and "buckets" - args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(new TestCaseSupplier.TypedData(50, DataType.INTEGER, "buckets").forceLiteral()); args.add(numericBound("from", fromType, 0.0)); args.add(numericBound("to", toType, 1000.0)); // TODO more number types for "from" and "to" String attr = "Attribute[channel=0]"; - if (numberType == DataTypes.INTEGER) { + if (numberType == DataType.INTEGER) { attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; - } else if (numberType == DataTypes.LONG) { + } else if (numberType == DataType.LONG) { attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; } return new TestCaseSupplier.TestCase( @@ -154,7 +153,7 @@ private static void numberCases(List suppliers, String name, D + attr + ", " + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, dateResultsMatcher(args) ); })); @@ -164,9 +163,9 @@ private static void numberCases(List suppliers, String name, D private static TestCaseSupplier.TypedData numericBound(String name, DataType type, double value) { Number v; - if (type == DataTypes.INTEGER) { + if (type == DataType.INTEGER) { v = (int) value; - } else if (type == DataTypes.LONG) { + } else if (type == DataType.LONG) { v = (long) value; } else { v = value; @@ -175,14 +174,14 @@ private static TestCaseSupplier.TypedData numericBound(String name, DataType typ } private static void numberCasesWithSpan(List suppliers, String name, DataType numberType, Supplier number) { - suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.DOUBLE), () -> { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataType.DOUBLE), () -> { List args = new ArrayList<>(); args.add(new TestCaseSupplier.TypedData(number.get(), "field")); - args.add(new TestCaseSupplier.TypedData(50., DataTypes.DOUBLE, 
"span").forceLiteral()); + args.add(new TestCaseSupplier.TypedData(50., DataType.DOUBLE, "span").forceLiteral()); String attr = "Attribute[channel=0]"; - if (numberType == DataTypes.INTEGER) { + if (numberType == DataType.INTEGER) { attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; - } else if (numberType == DataTypes.LONG) { + } else if (numberType == DataType.LONG) { attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; } return new TestCaseSupplier.TestCase( @@ -191,7 +190,7 @@ private static void numberCasesWithSpan(List suppliers, String + attr + ", " + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, dateResultsMatcher(args) ); })); @@ -203,7 +202,7 @@ private static TestCaseSupplier.TypedData keywordDateLiteral(String name, DataTy } private static Matcher dateResultsMatcher(List typedData) { - if (typedData.get(0).type() == DataTypes.DATETIME) { + if (typedData.get(0).type() == DataType.DATETIME) { long millis = ((Number) typedData.get(0).data()).longValue(); return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java index 8c9ff78bcdba8..14d6075f5cbe3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -37,7 +37,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "CbrtIntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::cbrt, Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -46,7 +46,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "CbrtLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::cbrt, Long.MIN_VALUE, Long.MAX_VALUE, @@ -55,7 +55,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "CbrtUnsignedLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.cbrt(unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -64,7 +64,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "CbrtDoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::cbrt, Double.MIN_VALUE, Double.MAX_VALUE, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java index 70b8eafb88d24..735113c34ca1b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java @@ -13,9 +13,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import java.math.BigInteger; import java.util.ArrayList; @@ -24,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; -public class CeilTests extends AbstractScalarFunctionTestCase { +public class CeilTests extends AbstractFunctionTestCase { public CeilTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -32,28 +31,28 @@ public CeilTests(@Name("TestCase") Supplier testCaseS @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.addAll(List.of(new TestCaseSupplier("large double value", () -> { + suppliers.addAll(List.of(new TestCaseSupplier("large double value", List.of(DataType.DOUBLE), () -> { double arg = 1 / randomDouble(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(arg, DataTypes.DOUBLE, "arg")), + List.of(new TestCaseSupplier.TypedData(arg, DataType.DOUBLE, "arg")), "CeilDoubleEvaluator[val=Attribute[channel=0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(Math.ceil(arg)) ); - }), new TestCaseSupplier("integer value", () -> { + }), new TestCaseSupplier("integer value", List.of(DataType.INTEGER), () -> { int arg = randomInt(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(arg, DataTypes.INTEGER, "arg")), + List.of(new TestCaseSupplier.TypedData(arg, DataType.INTEGER, "arg")), "Attribute[channel=0]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(arg) ); - }), new TestCaseSupplier("long value", () -> { + }), new TestCaseSupplier("long value", List.of(DataType.LONG), () -> { long arg = randomLong(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(arg, DataTypes.LONG, "arg")), + List.of(new TestCaseSupplier.TypedData(arg, DataType.LONG, "arg")), "Attribute[channel=0]", - DataTypes.LONG, + DataType.LONG, equalTo(arg) ); }))); @@ -61,23 +60,13 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "Attribute[channel=0]", - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, (n) -> n, BigInteger.ZERO, UNSIGNED_LONG_MAX, List.of() ); - return parameterSuppliersFromTypedData(suppliers); - } - - @Override - protected DataType expectedType(List argTypes) { - return argTypes.get(0); - } - - @Override - protected List argSpec() { - return List.of(required(numerics())); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java index 01d848ea9609b..8eb0b80fc21d7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,9 +33,9 @@ public ETests(@Name("TestCase") Supplier testCaseSupp public static Iterable parameters() { return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("E Test", () -> { return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataTypes.INTEGER, "foo")), + List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), "LiteralsEvaluator[lit=2.718281828459045]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(Math.E) ); }))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java index cb4fd8a403ed1..62c23369cc436 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -30,12 +30,12 @@ public FloorTests(@Name("TestCase") Supplier testCase public static Iterable parameters() { String read = "Attribute[channel=0]"; List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryInt(suppliers, read, DataTypes.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); - TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); + TestCaseSupplier.forUnaryInt(suppliers, read, DataType.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); + TestCaseSupplier.forUnaryLong(suppliers, read, DataType.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); TestCaseSupplier.forUnaryUnsignedLong( suppliers, read, - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, ul -> ul, BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -44,7 +44,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "FloorDoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::floor, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 95bc853c890c2..64329d7824b74 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -37,7 +37,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "Log10IntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::log10, 1, Integer.MAX_VALUE, @@ -46,7 +46,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "Log10LongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::log10, 1L, Long.MAX_VALUE, @@ -55,7 +55,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "Log10UnsignedLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.log10(ul == null ? null : unsignedLongToDouble(bigIntegerToUnsignedLong(ul))), BigInteger.ONE, UNSIGNED_LONG_MAX, @@ -64,7 +64,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "Log10DoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::log10, Double.MIN_VALUE, Double.POSITIVE_INFINITY, @@ -78,7 +78,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "Log10IntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Integer.MIN_VALUE, 0, @@ -90,7 +90,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "Log10LongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Long.MIN_VALUE, 0L, @@ -102,7 +102,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "Log10UnsignedLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, BigInteger.ZERO, BigInteger.ZERO, @@ -114,7 +114,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "Log10DoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Double.NEGATIVE_INFINITY, 0d, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java index e884b63c52591..ce53fdbfc1851 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -13,14 +13,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import java.util.List; import java.util.function.Supplier; -public class LogTests extends AbstractScalarFunctionTestCase { +public class LogTests extends 
AbstractFunctionTestCase { public LogTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -55,7 +54,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "LogConstantEvaluator[value=Attribute[channel=0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::log, Math.nextUp(0d), Math.nextDown(1d), @@ -70,12 +69,12 @@ public static Iterable parameters() { "value", (b, l) -> Math.log10(l) / Math.log10(b), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataType.DOUBLE) ), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataType.DOUBLE) ), List.of() ) @@ -143,12 +142,12 @@ public static Iterable parameters() { "value", (b, l) -> Math.log10(l) / Math.log10(b), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataType.DOUBLE) ), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataType.DOUBLE) ), List.of() ) @@ -162,12 +161,12 @@ public static Iterable parameters() { "value", (b, l) -> Math.log10(l) / Math.log10(b), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataType.DOUBLE) ), List.of( - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataType.DOUBLE) ), List.of() ) @@ -195,16 +194,6 @@ public static Iterable parameters() { return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.DOUBLE; - } - - @Override - protected List argSpec() { - return List.of(optional(numerics()), required(numerics())); - } - @Override protected Expression build(Source source, List args) { return new Log(source, args.get(0), args.size() > 1 ? 
args.get(1) : null); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java index ed5a80cb81b43..2c1322abf8cda 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; @@ -42,7 +42,7 @@ public static Iterable parameters() { () -> new TestCaseSupplier.TestCase( List.of(), matchesPattern("LiteralsEvaluator\\[lit=.*\\]"), - DataTypes.DATETIME, + DataType.DATETIME, equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java index bee3a495a5aa5..c21082b905962 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,9 +33,9 @@ public PiTests(@Name("TestCase") Supplier testCaseSup public static Iterable parameters() { return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Pi Test", () -> { return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataTypes.INTEGER, "foo")), + List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), "LiteralsEvaluator[lit=3.141592653589793]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(Math.PI) ); }))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index acc1a3a10ba8d..545e7c14ff2b2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -13,14 +13,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import java.util.List; import java.util.function.Supplier; -public class PowTests extends AbstractScalarFunctionTestCase { +public class PowTests extends AbstractFunctionTestCase { public PowTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -50,8 +49,8 @@ public static Iterable parameters() { // 143^143 is still representable, but 144^144 is infinite TestCaseSupplier.castToDoubleSuppliersFromRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY), List.of( - new TestCaseSupplier.TypedDataSupplier("<0 double>", () -> 0d, DataTypes.DOUBLE), - new TestCaseSupplier.TypedDataSupplier("<-0 double>", () -> -0d, DataTypes.DOUBLE) + new TestCaseSupplier.TypedDataSupplier("<0 double>", () -> 0d, DataType.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("<-0 double>", () -> -0d, DataType.DOUBLE) ), List.of() ) @@ -81,16 +80,6 @@ public static Iterable parameters() { return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); } - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.DOUBLE; - } - - @Override - protected List argSpec() { - return List.of(required(numerics()), required(numerics())); - } - @Override protected Expression build(Source source, List args) { return new Pow(source, args.get(0), args.get(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 115ce6b7019c6..5e19d5f606034 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.math.Maths; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -26,7 +25,6 @@ import java.util.function.Function; import java.util.function.Supplier; -import static org.elasticsearch.test.ESTestCase.randomDouble; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -42,7 +40,7 @@ public static Iterable parameters() { suppliers.add( supplier( "", - DataTypes.DOUBLE, + DataType.DOUBLE, () -> 1 / randomDouble(), "RoundDoubleNoDecimalsEvaluator[val=Attribute[channel=0]]", d -> Maths.round(d, 0) @@ -51,9 +49,9 @@ public static Iterable parameters() { suppliers.add( supplier( ", ", - DataTypes.DOUBLE, + DataType.DOUBLE, () -> 1 / randomDouble(), - DataTypes.INTEGER, + DataType.INTEGER, () -> between(-30, 30), "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", Maths::round @@ -67,29 +65,29 @@ public static Iterable parameters() { (nullPosition, 
nullData, original) -> original ); - suppliers.add(new TestCaseSupplier("two doubles", List.of(DataTypes.DOUBLE, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("two doubles", List.of(DataType.DOUBLE, DataType.INTEGER), () -> { double number1 = 1 / randomDouble(); double number2 = 1 / randomDouble(); int precision = between(-30, 30); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(List.of(number1, number2), DataTypes.DOUBLE, "number"), - new TestCaseSupplier.TypedData(precision, DataTypes.INTEGER, "decimals") + new TestCaseSupplier.TypedData(List.of(number1, number2), DataType.DOUBLE, "number"), + new TestCaseSupplier.TypedData(precision, DataType.INTEGER, "decimals") ), "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", - DataTypes.DOUBLE, + DataType.DOUBLE, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); })); // Integer or Long without a decimals parameter is a noop - suppliers.add(supplier("", DataTypes.INTEGER, ESTestCase::randomInt, "Attribute[channel=0]", Function.identity())); - suppliers.add(supplier("", DataTypes.LONG, ESTestCase::randomLong, "Attribute[channel=0]", Function.identity())); + suppliers.add(supplier("", DataType.INTEGER, ESTestCase::randomInt, "Attribute[channel=0]", Function.identity())); + suppliers.add(supplier("", DataType.LONG, ESTestCase::randomLong, "Attribute[channel=0]", Function.identity())); suppliers.add( supplier( "", - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, ESTestCase::randomLong, "Attribute[channel=0]", NumericUtils::unsignedLongAsBigInteger @@ -136,7 +134,7 @@ public static Iterable parameters() { private static TestCaseSupplier supplier(double v, double expected) { return supplier( "round(" + v + ") -> " + expected, - DataTypes.DOUBLE, + DataType.DOUBLE, () -> v, "RoundDoubleNoDecimalsEvaluator[val=Attribute[channel=0]]", value -> expected @@ -146,9 +144,9 @@ private static TestCaseSupplier supplier(double v, double expected) { private static TestCaseSupplier supplier(double v, int decimals, double expected) { return supplier( "round(" + v + ", " + decimals + ") -> " + expected, - DataTypes.DOUBLE, + DataType.DOUBLE, () -> v, - DataTypes.INTEGER, + DataType.INTEGER, () -> decimals, "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", (value, de) -> expected @@ -158,9 +156,9 @@ private static TestCaseSupplier supplier(double v, int decimals, double expected private static TestCaseSupplier supplier(long v, int decimals, long expected) { return supplier( "round(" + v + "L, " + decimals + ") -> " + expected, - DataTypes.LONG, + DataType.LONG, () -> v, - DataTypes.INTEGER, + DataType.INTEGER, () -> decimals, "RoundLongEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", (value, de) -> expected @@ -170,9 +168,9 @@ private static TestCaseSupplier supplier(long v, int decimals, long expected) { private static TestCaseSupplier supplier(int v, int decimals, int expected) { return supplier( "round(" + v + ", " + decimals + ") -> " + expected, - DataTypes.INTEGER, + DataType.INTEGER, () -> v, - DataTypes.INTEGER, + DataType.INTEGER, () -> decimals, "RoundIntEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", (value, de) -> 
expected diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java index 8a6d88fb399a1..89c2d07c4470a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -34,7 +34,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "SignumIntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, i -> (double) Math.signum(i), Integer.MIN_VALUE, Integer.MAX_VALUE, @@ -44,7 +44,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "SignumLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, l -> (double) Math.signum(l), Long.MIN_VALUE, Long.MAX_VALUE, @@ -54,7 +54,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "SignumUnsignedLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.signum(NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -63,7 +63,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "SignumDoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::signum, -Double.MAX_VALUE, Double.MAX_VALUE, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index c7fbe713fab0c..a1d5b8523175c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -37,7 +37,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "SqrtIntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::sqrt, 0, Integer.MAX_VALUE, @@ -46,7 +46,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "SqrtLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::sqrt, 0, Long.MAX_VALUE, @@ -55,7 +55,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryUnsignedLong( suppliers, "SqrtUnsignedLongEvaluator[val=" + 
read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, ul -> Math.sqrt(ul == null ? null : unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, UNSIGNED_LONG_MAX, @@ -64,7 +64,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "SqrtDoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, Math::sqrt, -0d, Double.MAX_VALUE, @@ -76,7 +76,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "SqrtIntEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Integer.MIN_VALUE, -1, @@ -88,7 +88,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "SqrtLongEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Long.MIN_VALUE, -1, @@ -100,7 +100,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "SqrtDoubleEvaluator[val=" + read + "]", - DataTypes.DOUBLE, + DataType.DOUBLE, k -> null, Double.NEGATIVE_INFINITY, -Double.MIN_VALUE, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java index 57448df4ec788..aa64dfc6af90d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,9 +33,9 @@ public TauTests(@Name("TestCase") Supplier testCaseSu public static Iterable parameters() { return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Tau Test", () -> { return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(1, DataTypes.INTEGER, "foo")), + List.of(new TestCaseSupplier.TypedData(1, DataType.INTEGER, "foo")), "LiteralsEvaluator[lit=6.283185307179586]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(Tau.TAU) ); }))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index ce7cd68198753..2ea79d8a165c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; @@ -50,7 +49,7 @@ protected static void booleans( String evaluatorName, BiFunction, Matcher> matcher ) { - booleans(cases, name, evaluatorName, DataTypes.BOOLEAN, matcher); + booleans(cases, name, evaluatorName, DataType.BOOLEAN, matcher); } /** @@ -66,9 +65,9 @@ protected static void booleans( cases.add( new TestCaseSupplier( name + "(false)", - List.of(DataTypes.BOOLEAN), + List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(false), DataTypes.BOOLEAN, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(false), DataType.BOOLEAN, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, Stream.of(false)) @@ -78,9 +77,9 @@ protected static void booleans( cases.add( new TestCaseSupplier( name + "(true)", - List.of(DataTypes.BOOLEAN), + List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(true), DataTypes.BOOLEAN, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(true), DataType.BOOLEAN, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, Stream.of(true)) @@ -88,11 +87,11 @@ protected static void booleans( ) ); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.BOOLEAN), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.BOOLEAN), () -> { List mvData = randomList(2, 100, ESTestCase::randomBoolean); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.BOOLEAN, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.BOOLEAN, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream()) @@ -123,8 +122,8 @@ protected static void bytesRefs( Function expectedDataType, BiFunction, Matcher> matcher ) { - for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.IP, DataTypes.VERSION }) { - if (type != DataTypes.IP) { + for (DataType type : new DataType[] { DataType.KEYWORD, DataType.TEXT, DataType.IP, DataType.VERSION }) { + if (type != DataType.IP) { cases.add( new TestCaseSupplier( name + "(empty " + type.typeName() + ")", @@ -171,7 +170,7 @@ protected static void doubles( String evaluatorName, BiFunction> matcher ) { - doubles(cases, name, evaluatorName, DataTypes.DOUBLE, matcher); + doubles(cases, name, evaluatorName, DataType.DOUBLE, matcher); } /** @@ -187,30 +186,30 @@ protected static void doubles( cases.add( new TestCaseSupplier( name + "(0.0)", - List.of(DataTypes.DOUBLE), + List.of(DataType.DOUBLE), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(0.0), DataTypes.DOUBLE, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(0.0), DataType.DOUBLE, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, DoubleStream.of(0.0)) ) ) ); - cases.add(new TestCaseSupplier(name + "(double)", List.of(DataTypes.DOUBLE), () -> { + cases.add(new TestCaseSupplier(name + "(double)", List.of(DataType.DOUBLE), () -> { double mvData = randomDouble(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(mvData), DataTypes.DOUBLE, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(mvData), DataType.DOUBLE, "field")), 
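
The booleans(...) and doubles(...) generators in this abstract test case follow a single template: one fixed single-value case, then one randomized multivalue case per Block.MvOrdering so each block ordering is exercised. A rough sketch of that template under stand-in types (TestCase, Ordering and the random source here are simplifications, not the real test framework):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Random;
    import java.util.function.Supplier;

    class MvCaseTemplate {
        // Stand-in for Block.MvOrdering.
        enum Ordering { UNORDERED, ASCENDING }

        record TestCase(String name, List<Double> input) {}

        static List<Supplier<TestCase>> doubleCases(Random random) {
            List<Supplier<TestCase>> cases = new ArrayList<>();
            // Fixed single-value case, mirroring name + "(0.0)" above.
            cases.add(() -> new TestCase("mv_fn(0.0)", List.of(0.0)));
            // One randomized multivalue case per ordering, mirroring the
            // Block.MvOrdering loop above (the real helpers also reorder
            // the data with putInOrder to match the ordering).
            for (Ordering ordering : Ordering.values()) {
                cases.add(() -> {
                    int size = 1 + random.nextInt(100);
                    List<Double> data = new ArrayList<>();
                    for (int i = 0; i < size; i++) {
                        data.add(random.nextDouble());
                    }
                    return new TestCase("mv_fn() " + ordering, data);
                });
            }
            return cases;
        }
    }
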
evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, DoubleStream.of(mvData)) ); })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.DOUBLE), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.DOUBLE), () -> { List mvData = randomList(1, 100, ESTestCase::randomDouble); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.DOUBLE, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.DOUBLE, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream().mapToDouble(Double::doubleValue)) @@ -228,7 +227,7 @@ protected static void ints( String evaluatorName, BiFunction> matcher ) { - ints(cases, name, evaluatorName, DataTypes.INTEGER, matcher); + ints(cases, name, evaluatorName, DataType.INTEGER, matcher); } /** @@ -244,30 +243,30 @@ protected static void ints( cases.add( new TestCaseSupplier( name + "(0)", - List.of(DataTypes.INTEGER), + List.of(DataType.INTEGER), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(0), DataTypes.INTEGER, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(0), DataType.INTEGER, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, IntStream.of(0)) ) ) ); - cases.add(new TestCaseSupplier(name + "(int)", List.of(DataTypes.INTEGER), () -> { + cases.add(new TestCaseSupplier(name + "(int)", List.of(DataType.INTEGER), () -> { int data = randomInt(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.INTEGER, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(data), DataType.INTEGER, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, IntStream.of(data)) ); })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.INTEGER), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.INTEGER), () -> { List mvData = randomList(1, 100, ESTestCase::randomInt); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.INTEGER, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.INTEGER, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream().mapToInt(Integer::intValue)) @@ -285,7 +284,7 @@ protected static void longs( String evaluatorName, BiFunction> matcher ) { - longs(cases, name, evaluatorName, DataTypes.LONG, matcher); + longs(cases, name, evaluatorName, DataType.LONG, matcher); } /** @@ -301,30 +300,30 @@ protected static void longs( cases.add( new TestCaseSupplier( name + "(0L)", - List.of(DataTypes.LONG), + List.of(DataType.LONG), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(0L), DataTypes.LONG, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(0L), DataType.LONG, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, LongStream.of(0L)) ) ) ); - cases.add(new TestCaseSupplier(name + "(long)", List.of(DataTypes.LONG), () -> { + cases.add(new TestCaseSupplier(name + "(long)", List.of(DataType.LONG), () -> { long data = randomLong(); return new 
TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.LONG, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(data), DataType.LONG, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, LongStream.of(data)) ); })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.LONG), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.LONG), () -> { List mvData = randomList(1, 100, ESTestCase::randomLong); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.LONG, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.LONG, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream().mapToLong(Long::longValue)) @@ -342,7 +341,7 @@ protected static void dateTimes( String evaluatorName, BiFunction> matcher ) { - dateTimes(cases, name, evaluatorName, DataTypes.DATETIME, matcher); + dateTimes(cases, name, evaluatorName, DataType.DATETIME, matcher); } /** @@ -358,30 +357,30 @@ protected static void dateTimes( cases.add( new TestCaseSupplier( name + "(epoch)", - List.of(DataTypes.DATETIME), + List.of(DataType.DATETIME), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(0L), DataTypes.DATETIME, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(0L), DataType.DATETIME, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, LongStream.of(0L)) ) ) ); - cases.add(new TestCaseSupplier(name + "(date)", List.of(DataTypes.DATETIME), () -> { + cases.add(new TestCaseSupplier(name + "(date)", List.of(DataType.DATETIME), () -> { long data = randomLong(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.DATETIME, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(data), DataType.DATETIME, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, LongStream.of(data)) ); })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.DATETIME), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.DATETIME), () -> { List mvData = randomList(1, 100, ESTestCase::randomLong); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.DATETIME, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.DATETIME, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream().mapToLong(Long::longValue)) @@ -400,7 +399,7 @@ protected static void geoPoints( String evaluatorName, BiFunction, Matcher> matcher ) { - geoPoints(cases, name, evaluatorName, DataTypes.GEO_POINT, matcher); + geoPoints(cases, name, evaluatorName, DataType.GEO_POINT, matcher); } /** @@ -415,7 +414,7 @@ protected static void geoPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - spatial(cases, name, evaluatorName, DataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); + spatial(cases, name, evaluatorName, DataType.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); } /** @@ -428,7 +427,7 @@ protected static void 
cartesianPoints( String evaluatorName, BiFunction, Matcher> matcher ) { - cartesianPoints(cases, name, evaluatorName, DataTypes.CARTESIAN_POINT, matcher); + cartesianPoints(cases, name, evaluatorName, DataType.CARTESIAN_POINT, matcher); } /** @@ -443,7 +442,7 @@ protected static void cartesianPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - spatial(cases, name, evaluatorName, DataTypes.CARTESIAN_POINT, expectedDataType, CARTESIAN, ShapeTestUtils::randomPoint, matcher); + spatial(cases, name, evaluatorName, DataType.CARTESIAN_POINT, expectedDataType, CARTESIAN, ShapeTestUtils::randomPoint, matcher); } /** @@ -462,7 +461,7 @@ protected static void geoShape( cases, name, evaluatorName, - DataTypes.GEO_SHAPE, + DataType.GEO_SHAPE, expectedDataType, GEO, () -> rarely() ? GeometryTestUtils.randomGeometry(randomBoolean()) : GeometryTestUtils.randomPoint(), @@ -486,7 +485,7 @@ protected static void cartesianShape( cases, name, evaluatorName, - DataTypes.CARTESIAN_SHAPE, + DataType.CARTESIAN_SHAPE, expectedDataType, CARTESIAN, () -> rarely() ? ShapeTestUtils.randomGeometry(randomBoolean()) : ShapeTestUtils.randomPoint(), @@ -539,7 +538,7 @@ protected static void unsignedLongs( String evaluatorName, BiFunction, Matcher> matcher ) { - unsignedLongs(cases, name, evaluatorName, DataTypes.UNSIGNED_LONG, matcher); + unsignedLongs(cases, name, evaluatorName, DataType.UNSIGNED_LONG, matcher); } /** @@ -555,12 +554,12 @@ protected static void unsignedLongs( cases.add( new TestCaseSupplier( name + "(0UL)", - List.of(DataTypes.UNSIGNED_LONG), + List.of(DataType.UNSIGNED_LONG), () -> new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData( List.of(NumericUtils.asLongUnsigned(BigInteger.ZERO)), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, "field" ) ), @@ -570,21 +569,21 @@ protected static void unsignedLongs( ) ) ); - cases.add(new TestCaseSupplier(name + "(unsigned long)", List.of(DataTypes.UNSIGNED_LONG), () -> { + cases.add(new TestCaseSupplier(name + "(unsigned long)", List.of(DataType.UNSIGNED_LONG), () -> { long data = randomLong(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.UNSIGNED_LONG, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(data), DataType.UNSIGNED_LONG, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(1, Stream.of(NumericUtils.unsignedLongAsBigInteger(data))) ); })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { - cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.UNSIGNED_LONG), () -> { + cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataType.UNSIGNED_LONG), () -> { List mvData = randomList(1, 100, ESTestCase::randomLong); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.UNSIGNED_LONG, "field")), + List.of(new TestCaseSupplier.TypedData(mvData, DataType.UNSIGNED_LONG, "field")), evaluatorName + "[field=Attribute[channel=0]]", expectedDataType, matcher.apply(mvData.size(), mvData.stream().map(NumericUtils::unsignedLongAsBigInteger)) @@ -621,7 +620,7 @@ private static > void putInOrder(List mvData, Block.M protected final DataType[] representableNumerics() { // TODO numeric should only include representable numbers but that is a change for a followup - return DataTypes.types().stream().filter(DataType::isNumeric).filter(EsqlDataTypes::isRepresentable).toArray(DataType[]::new); + return 
DataType.types().stream().filter(DataType::isNumeric).filter(EsqlDataTypes::isRepresentable).toArray(DataType[]::new); } protected DataType expectedType(List argTypes) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java new file mode 100644 index 0000000000000..6361360652a87 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java @@ -0,0 +1,296 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; +import static org.hamcrest.Matchers.equalTo; + +public class MvAppendTests extends AbstractFunctionTestCase { + public MvAppendTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + booleans(suppliers); + ints(suppliers); + longs(suppliers); + doubles(suppliers); + bytesRefs(suppliers); + nulls(suppliers); + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new MvAppend(source, args.get(0), args.get(1)); + } + + private static void booleans(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.BOOLEAN), () -> { + List field1 = randomList(1, 10, () -> randomBoolean()); + List field2 = randomList(1, 10, () -> randomBoolean()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.BOOLEAN, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.BOOLEAN, "field2") + ), + "MvAppendBooleanEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.BOOLEAN, + equalTo(result) + ); + })); + } + + private static void ints(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER), () -> { + List field1 = randomList(1, 10, () -> randomInt()); + List field2 = randomList(1, 10, () -> randomInt()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, 
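
representableNumerics() just above now sources the full type list from DataType.types() rather than the removed DataTypes.types(), keeping the same stream pipeline. The shape of that pipeline, sketched against a plain enum (the isRepresentable stub stands in for EsqlDataTypes::isRepresentable):

    import java.util.Arrays;

    class RepresentableNumerics {
        enum Type {
            INTEGER(true), LONG(true), UNSIGNED_LONG(true), DOUBLE(true), KEYWORD(false);

            private final boolean numeric;

            Type(boolean numeric) {
                this.numeric = numeric;
            }

            boolean isNumeric() {
                return numeric;
            }
        }

        // Stub: the real predicate lives in EsqlDataTypes.
        static boolean isRepresentable(Type type) {
            return true;
        }

        static Type[] representableNumerics() {
            return Arrays.stream(Type.values())
                .filter(Type::isNumeric)
                .filter(RepresentableNumerics::isRepresentable)
                .toArray(Type[]::new);
        }
    }
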
DataType.INTEGER, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.INTEGER, "field2") + ), + "MvAppendIntEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.INTEGER, + equalTo(result) + ); + })); + } + + private static void longs(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.LONG, DataType.LONG), () -> { + List field1 = randomList(1, 10, () -> randomLong()); + List field2 = randomList(1, 10, () -> randomLong()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.LONG, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.LONG, "field2") + ), + "MvAppendLongEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.LONG, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.DATETIME), () -> { + List field1 = randomList(1, 10, () -> randomLong()); + List field2 = randomList(1, 10, () -> randomLong()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.DATETIME, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.DATETIME, "field2") + ), + "MvAppendLongEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.DATETIME, + equalTo(result) + ); + })); + } + + private static void doubles(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.DOUBLE, DataType.DOUBLE), () -> { + List field1 = randomList(1, 10, () -> randomDouble()); + List field2 = randomList(1, 10, () -> randomDouble()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.DOUBLE, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.DOUBLE, "field2") + ), + "MvAppendDoubleEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.DOUBLE, + equalTo(result) + ); + })); + } + + private static void bytesRefs(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD, DataType.KEYWORD), () -> { + List field1 = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); + List field2 = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.KEYWORD, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.KEYWORD, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.KEYWORD, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT, DataType.TEXT), () -> { + List field1 = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); + List field2 = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.TEXT, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.TEXT, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.TEXT, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.IP), () -> { + List 
field1 = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); + List field2 = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.IP, "field"), + new TestCaseSupplier.TypedData(field2, DataType.IP, "field") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.IP, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.VERSION), () -> { + List field1 = randomList(1, 10, () -> randomLiteral(DataType.VERSION).value()); + List field2 = randomList(1, 10, () -> randomLiteral(DataType.VERSION).value()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.VERSION, "field"), + new TestCaseSupplier.TypedData(field2, DataType.VERSION, "field") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.VERSION, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.GEO_POINT, DataType.GEO_POINT), () -> { + List field1 = randomList(1, 10, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomPoint()))); + List field2 = randomList(1, 10, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomPoint()))); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.GEO_POINT, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.GEO_POINT, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.GEO_POINT, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.CARTESIAN_POINT, DataType.CARTESIAN_POINT), () -> { + List field1 = randomList(1, 10, () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomPoint()))); + List field2 = randomList(1, 10, () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomPoint()))); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.CARTESIAN_POINT, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.CARTESIAN_POINT, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.CARTESIAN_POINT, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.GEO_SHAPE, DataType.GEO_SHAPE), () -> { + List field1 = randomList(1, 5, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomGeometry(randomBoolean())))); + List field2 = randomList(1, 5, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomGeometry(randomBoolean())))); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.GEO_SHAPE, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.GEO_SHAPE, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.GEO_SHAPE, + equalTo(result) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.CARTESIAN_SHAPE, DataType.CARTESIAN_SHAPE), () -> { + List field1 = randomList(1, 5, () -> new 
BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomGeometry(randomBoolean())))); + List field2 = randomList(1, 5, () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomGeometry(randomBoolean())))); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.CARTESIAN_SHAPE, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.CARTESIAN_SHAPE, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.CARTESIAN_SHAPE, + equalTo(result) + ); + })); + } + + private static void nulls(List suppliers) { + suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER), () -> { + List field2 = randomList(2, 10, () -> randomInt()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(null, DataType.INTEGER, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.INTEGER, "field2") + ), + "MvAppendIntEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.INTEGER, + equalTo(null) + ); + })); + suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER), () -> { + List field1 = randomList(2, 10, () -> randomInt()); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.INTEGER, "field1"), + new TestCaseSupplier.TypedData(null, DataType.INTEGER, "field2") + ), + "MvAppendIntEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.INTEGER, + equalTo(null) + ); + })); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java index 0ad43f520daf3..966a5a590e256 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -41,14 +40,14 @@ public static Iterable parameters() { return equalTo(sum.value() / size); }; List cases = new ArrayList<>(); - doubles(cases, "mv_avg", "MvAvg", DataTypes.DOUBLE, avg); - ints(cases, "mv_avg", "MvAvg", DataTypes.DOUBLE, (size, data) -> avg.apply(size, data.mapToDouble(v -> (double) v))); - longs(cases, "mv_avg", "MvAvg", DataTypes.DOUBLE, (size, data) -> avg.apply(size, data.mapToDouble(v -> (double) v))); + doubles(cases, "mv_avg", "MvAvg", DataType.DOUBLE, avg); + ints(cases, "mv_avg", "MvAvg", DataType.DOUBLE, (size, data) -> avg.apply(size, data.mapToDouble(v -> (double) v))); + longs(cases, "mv_avg", "MvAvg", DataType.DOUBLE, (size, data) -> avg.apply(size, data.mapToDouble(v -> (double) v))); unsignedLongs( cases, "mv_avg", "MvAvg", - DataTypes.DOUBLE, + DataType.DOUBLE, /* * Converting strait from BigInteger to double will round differently. 
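
Taken together, the MvAppendTests suppliers pin down the function's contract: the output is field1's values followed by field2's, keeping the input type, and a null on either side yields null (the two nulls(...) cases). A plain-Java sketch of that contract; the real evaluator works position-by-position on Blocks, not on Lists:

    import java.util.ArrayList;
    import java.util.List;

    class MvAppendContract {
        // Null-propagating concatenation, matching the expectations the
        // suppliers build with new ArrayList<>(field1) plus addAll(field2).
        static <T> List<T> mvAppend(List<T> field1, List<T> field2) {
            if (field1 == null || field2 == null) {
                return null;
            }
            List<T> result = new ArrayList<>(field1);
            result.addAll(field2);
            return result;
        }
    }
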
* So we have to go back to encoded `long` and then convert to double @@ -71,6 +70,6 @@ protected DataType[] supportedTypes() { @Override protected DataType expectedType(List argTypes) { - return DataTypes.DOUBLE; // Averages are always a double + return DataType.DOUBLE; // Averages are always a double } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index 3b4ab2b5cfaa0..39ef5eefe9287 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -33,11 +32,11 @@ public MvConcatTests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - for (DataType fieldType : DataTypes.types()) { + for (DataType fieldType : DataType.types()) { if (EsqlDataTypes.isString(fieldType) == false) { continue; } - for (DataType delimType : DataTypes.types()) { + for (DataType delimType : DataType.types()) { if (EsqlDataTypes.isString(delimType) == false) { continue; } @@ -62,7 +61,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(new BytesRef(delim), delimType, "delim") ), "MvConcat[field=Attribute[channel=0], delim=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(expected)) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java index 9c379990e4707..8733dc0d25c40 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -30,17 +29,17 @@ public MvCountTests(@Name("TestCase") Supplier testCa @ParametersFactory public static Iterable parameters() { List cases = new ArrayList<>(); - booleans(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - bytesRefs(cases, "mv_count", "MvCount", t -> DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - doubles(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - ints(cases, "mv_count", "MvCount", 
DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - longs(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - unsignedLongs(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - dateTimes(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - geoPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - cartesianPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - geoShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - cartesianShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + booleans(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + bytesRefs(cases, "mv_count", "MvCount", t -> DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + doubles(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + ints(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + longs(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + unsignedLongs(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + dateTimes(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + geoPoints(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + cartesianPoints(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + geoShape(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + cartesianShape(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } @@ -56,6 +55,6 @@ protected DataType[] supportedTypes() { @Override protected DataType expectedType(List argTypes) { - return DataTypes.INTEGER; + return DataType.INTEGER; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index 0aed84d57004a..f202a8033ffc9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -42,6 +42,11 @@ public static Iterable parameters() { doubles(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Double::valueOf))); ints(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Integer::valueOf))); longs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); + cartesianPoints(cases, "mv_dedupe", "MvDedupe", (size, values) -> 
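
Every mv_count case above expects DataType.INTEGER regardless of the input type, with the value being simply the number of values at the position. The matcher logic reduces to one line over a stream (values stands in for the per-position stream the framework passes to the matcher):

    import java.util.stream.Stream;

    class MvCountContract {
        static int mvCount(Stream<?> values) {
            // Math.toIntExact throws rather than silently truncating if the
            // count ever exceeded Integer.MAX_VALUE, as in the matchers above.
            return Math.toIntExact(values.count());
        }
    }
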
getMatcher(values)); + cartesianShape(cases, "mv_dedupe", "MvDedupe", DataType.CARTESIAN_SHAPE, (size, values) -> getMatcher(values)); + geoPoints(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); + geoShape(cases, "mv_dedupe", "MvDedupe", DataType.GEO_SHAPE, (size, values) -> getMatcher(values)); + // TODO switch extraction to BigInteger so this just works. // unsignedLongs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); return parameterSuppliersFromTypedData(cases); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java index abd9a39f944d4..1c24b1a8aae64 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -31,17 +30,17 @@ public MvFirstTests(@Name("TestCase") Supplier testCa @ParametersFactory public static Iterable parameters() { List cases = new ArrayList<>(); - booleans(cases, "mv_first", "MvFirst", DataTypes.BOOLEAN, (size, values) -> equalTo(values.findFirst().get())); + booleans(cases, "mv_first", "MvFirst", DataType.BOOLEAN, (size, values) -> equalTo(values.findFirst().get())); bytesRefs(cases, "mv_first", "MvFirst", Function.identity(), (size, values) -> equalTo(values.findFirst().get())); - doubles(cases, "mv_first", "MvFirst", DataTypes.DOUBLE, (size, values) -> equalTo(values.findFirst().getAsDouble())); - ints(cases, "mv_first", "MvFirst", DataTypes.INTEGER, (size, values) -> equalTo(values.findFirst().getAsInt())); - longs(cases, "mv_first", "MvFirst", DataTypes.LONG, (size, values) -> equalTo(values.findFirst().getAsLong())); - unsignedLongs(cases, "mv_first", "MvFirst", DataTypes.UNSIGNED_LONG, (size, values) -> equalTo(values.findFirst().get())); - dateTimes(cases, "mv_first", "MvFirst", DataTypes.DATETIME, (size, values) -> equalTo(values.findFirst().getAsLong())); - geoPoints(cases, "mv_first", "MvFirst", DataTypes.GEO_POINT, (size, values) -> equalTo(values.findFirst().get())); - cartesianPoints(cases, "mv_first", "MvFirst", DataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); - geoShape(cases, "mv_first", "MvFirst", DataTypes.GEO_SHAPE, (size, values) -> equalTo(values.findFirst().get())); - cartesianShape(cases, "mv_first", "MvFirst", DataTypes.CARTESIAN_SHAPE, (size, values) -> equalTo(values.findFirst().get())); + doubles(cases, "mv_first", "MvFirst", DataType.DOUBLE, (size, values) -> equalTo(values.findFirst().getAsDouble())); + ints(cases, "mv_first", "MvFirst", DataType.INTEGER, (size, values) -> equalTo(values.findFirst().getAsInt())); + longs(cases, "mv_first", "MvFirst", DataType.LONG, (size, values) -> equalTo(values.findFirst().getAsLong())); + unsignedLongs(cases, "mv_first", "MvFirst", DataType.UNSIGNED_LONG, (size, values) -> equalTo(values.findFirst().get())); + dateTimes(cases, "mv_first", "MvFirst", DataType.DATETIME, (size, values) -> 
equalTo(values.findFirst().getAsLong())); + geoPoints(cases, "mv_first", "MvFirst", DataType.GEO_POINT, (size, values) -> equalTo(values.findFirst().get())); + cartesianPoints(cases, "mv_first", "MvFirst", DataType.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); + geoShape(cases, "mv_first", "MvFirst", DataType.GEO_SHAPE, (size, values) -> equalTo(values.findFirst().get())); + cartesianShape(cases, "mv_first", "MvFirst", DataType.CARTESIAN_SHAPE, (size, values) -> equalTo(values.findFirst().get())); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java index 286b372dce584..1b6fb482ea3d0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -31,17 +30,17 @@ public MvLastTests(@Name("TestCase") Supplier testCas @ParametersFactory public static Iterable parameters() { List cases = new ArrayList<>(); - booleans(cases, "mv_last", "MvLast", DataTypes.BOOLEAN, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + booleans(cases, "mv_last", "MvLast", DataType.BOOLEAN, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); bytesRefs(cases, "mv_last", "MvLast", Function.identity(), (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - doubles(cases, "mv_last", "MvLast", DataTypes.DOUBLE, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsDouble())); - ints(cases, "mv_last", "MvLast", DataTypes.INTEGER, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsInt())); - longs(cases, "mv_last", "MvLast", DataTypes.LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); - unsignedLongs(cases, "mv_last", "MvLast", DataTypes.UNSIGNED_LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - dateTimes(cases, "mv_last", "MvLast", DataTypes.DATETIME, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); - geoPoints(cases, "mv_last", "MvLast", DataTypes.GEO_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - cartesianPoints(cases, "mv_last", "MvLast", DataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - geoShape(cases, "mv_last", "MvLast", DataTypes.GEO_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - cartesianShape(cases, "mv_last", "MvLast", DataTypes.CARTESIAN_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + doubles(cases, "mv_last", "MvLast", DataType.DOUBLE, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsDouble())); + ints(cases, "mv_last", "MvLast", DataType.INTEGER, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsInt())); + longs(cases, "mv_last", "MvLast", DataType.LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); + 
unsignedLongs(cases, "mv_last", "MvLast", DataType.UNSIGNED_LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + dateTimes(cases, "mv_last", "MvLast", DataType.DATETIME, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); + geoPoints(cases, "mv_last", "MvLast", DataType.GEO_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + cartesianPoints(cases, "mv_last", "MvLast", DataType.CARTESIAN_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + geoShape(cases, "mv_last", "MvLast", DataType.GEO_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + cartesianShape(cases, "mv_last", "MvLast", DataType.CARTESIAN_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java index f6395074dbb80..4c324c916f861 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -72,11 +71,11 @@ public static Iterable parameters() { cases.add( new TestCaseSupplier( "mv_median(<1, 2>)", - List.of(DataTypes.INTEGER), + List.of(DataType.INTEGER), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(1, 2), DataTypes.INTEGER, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(1, 2), DataType.INTEGER, "field")), "MvMedian[field=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(1) ) ) @@ -84,11 +83,11 @@ public static Iterable parameters() { cases.add( new TestCaseSupplier( "mv_median(<-1, -2>)", - List.of(DataTypes.INTEGER), + List.of(DataType.INTEGER), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(List.of(-1, -2), DataTypes.INTEGER, "field")), + List.of(new TestCaseSupplier.TypedData(List.of(-1, -2), DataType.INTEGER, "field")), "MvMedian[field=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(-2) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index eba1bb623bb98..3ab17b78ff8e7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import 
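
The MvFirst and MvLast suppliers express their expectations with two stream idioms: findFirst() for the leading value and reduce((f, s) -> s) for the trailing one, since that reduction always discards the accumulator and keeps the newer element. Sketched outside the test framework on a list of longs:

    import java.util.List;
    import java.util.Optional;

    class FirstLastContract {
        static Optional<Long> mvFirst(List<Long> values) {
            return values.stream().findFirst();
        }

        static Optional<Long> mvLast(List<Long> values) {
            // (f, s) -> s keeps the second operand at every step, so the
            // reduction ends on the last element of the stream.
            return values.stream().reduce((f, s) -> s);
        }
    }
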
org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -44,8 +44,8 @@ public static Iterable parameters() { return parameterSuppliersFromTypedData( anyNullIsNull( suppliers, - (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL - ? DataTypes.NULL + (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataType.NULL + ? DataType.NULL : original.expectedType(), (nullPosition, nullData, original) -> original ) @@ -59,282 +59,282 @@ protected Expression build(Source source, List args) { private static void booleans(List suppliers) { // Positive - suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomBoolean()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.BOOLEAN, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBooleanEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); // Positive Start IndexOutofBound - suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomBoolean()); int length = field.size(); int start = randomIntBetween(length, length + 1); int end = randomIntBetween(start, length + 10); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.BOOLEAN, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBooleanEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, nullValue() ); })); // Positive End IndexOutofBound - suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomBoolean()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(length, length + 10); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.BOOLEAN, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBooleanEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(start == length - 1 ? field.get(start) : field.subList(start, length)) ); })); // Negative - suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomBoolean()); int length = field.size(); int start = randomIntBetween(0 - length, -1); int end = randomIntBetween(start, -1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.BOOLEAN, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBooleanEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(start == end ? 
field.get(start + length) : field.subList(start + length, end + 1 + length)) ); })); } private static void ints(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.INTEGER, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomInt()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.INTEGER, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.INTEGER, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceIntEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); } private static void longs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.LONG, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.LONG, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomLong()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.LONG, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.LONG, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceLongEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.LONG, + DataType.LONG, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.DATETIME, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomLong()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.DATETIME, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.DATETIME, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceLongEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); } private static void doubles(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.DOUBLE, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.DOUBLE, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 10, () -> randomDouble()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.DOUBLE, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.DOUBLE, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceDoubleEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); } private static void bytesRefs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD, DataTypes.INTEGER, DataTypes.INTEGER), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.KEYWORD).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.KEYWORD, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.INTEGER), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.TEXT).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.TEXT, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.TEXT, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.TEXT, + DataType.TEXT, equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.IP, DataTypes.INTEGER, DataTypes.INTEGER), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.IP).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.IP, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.IP, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.IP, + DataType.IP, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.VERSION, DataTypes.INTEGER, DataTypes.INTEGER), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.VERSION).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.VERSION).value()); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.VERSION, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.VERSION, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.VERSION, + DataType.VERSION, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.GEO_POINT, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.GEO_POINT, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 5, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomPoint()))); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.GEO_POINT, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.GEO_POINT, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.GEO_POINT, + DataType.GEO_POINT, equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.CARTESIAN_POINT, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.CARTESIAN_POINT, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 5, () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomPoint()))); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.CARTESIAN_POINT, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.CARTESIAN_POINT, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.CARTESIAN_POINT, + DataType.CARTESIAN_POINT, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.GEO_SHAPE, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.GEO_SHAPE, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 5, () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomGeometry(randomBoolean())))); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.GEO_SHAPE, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.GEO_SHAPE, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.GEO_SHAPE, + DataType.GEO_SHAPE, equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.CARTESIAN_SHAPE, DataTypes.INTEGER, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.CARTESIAN_SHAPE, DataType.INTEGER, DataType.INTEGER), () -> { List field = randomList(1, 5, () -> new BytesRef(CARTESIAN.asWkt(ShapeTestUtils.randomGeometry(randomBoolean())))); int length = field.size(); int start = randomIntBetween(0, length - 1); int end = randomIntBetween(start, length - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.CARTESIAN_SHAPE, "field"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(end, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(field, DataType.CARTESIAN_SHAPE, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") ), "MvSliceBytesRefEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", - DataTypes.CARTESIAN_SHAPE, + DataType.CARTESIAN_SHAPE, equalTo(start == end ? 
field.get(start) : field.subList(start, end + 1)) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index 9019de87256f9..7c6413e590bfe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -47,16 +47,16 @@ protected Expression build(Source source, List args) { } private static void booleans(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.BOOLEAN, DataType.KEYWORD), () -> { List field = randomList(1, 10, () -> randomBoolean()); BytesRef order = new BytesRef("ASC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.BOOLEAN, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BOOLEAN + "[field=Attribute[channel=0], order=true]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted().toList()) ); })); @@ -64,120 +64,120 @@ private static void booleans(List suppliers) { } private static void ints(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.INTEGER, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.KEYWORD), () -> { List field = randomList(1, 10, () -> randomInt()); BytesRef order = new BytesRef("DESC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.INTEGER, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.INTEGER, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.INT + "[field=Attribute[channel=0], order=false]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); } private static void longs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.LONG, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.LONG, DataType.KEYWORD), () -> { List field = randomList(1, 10, () -> randomLong()); BytesRef order = new BytesRef("ASC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.LONG, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.LONG, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=true]", - DataTypes.LONG, + DataType.LONG, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted().toList()) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.DATETIME, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.KEYWORD), () -> { List field = randomList(1, 10, () -> randomLong()); BytesRef order = new BytesRef("DESC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.DATETIME, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.DATETIME, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=false]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); } private static void doubles(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.DOUBLE, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.DOUBLE, DataType.KEYWORD), () -> { List field = randomList(1, 10, () -> randomDouble()); BytesRef order = new BytesRef("ASC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.DOUBLE, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.DOUBLE, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.DOUBLE + "[field=Attribute[channel=0], order=true]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted().toList()) ); })); } private static void bytesRefs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.KEYWORD).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); BytesRef order = new BytesRef("DESC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.KEYWORD, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=false]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT, DataTypes.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.TEXT).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); BytesRef order = new BytesRef("ASC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.TEXT, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.TEXT, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=true]", - DataTypes.TEXT, + DataType.TEXT, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted().toList()) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.IP, DataTypes.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.IP).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); BytesRef order = new BytesRef("DESC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.IP, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.IP, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=false]", - DataTypes.IP, + DataType.IP, equalTo(field.size() == 1 ? 
field.iterator().next() : field.stream().sorted(Collections.reverseOrder()).toList()) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.VERSION, DataTypes.KEYWORD), () -> { - List field = randomList(1, 10, () -> randomLiteral(DataTypes.VERSION).value()); + suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.KEYWORD), () -> { + List field = randomList(1, 10, () -> randomLiteral(DataType.VERSION).value()); BytesRef order = new BytesRef("ASC"); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(field, DataTypes.VERSION, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() + new TestCaseSupplier.TypedData(field, DataType.VERSION, "field"), + new TestCaseSupplier.TypedData(order, DataType.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=true]", - DataTypes.VERSION, + DataType.VERSION, equalTo(field.size() == 1 ? field.iterator().next() : field.stream().sorted().toList()) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 5dd248323923f..c1d820cd93931 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -41,27 +40,27 @@ public static Iterable parameters() { // longs(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); // unsignedLongAsBigInteger(cases, "mv_sum", "MvSum", (size, values) -> equalTo(values.sum())); - cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + cases.add(arithmeticExceptionCase(DataType.INTEGER, () -> { List data = randomList(1, 10, () -> randomIntBetween(0, Integer.MAX_VALUE)); data.add(Integer.MAX_VALUE); return data; })); - cases.add(arithmeticExceptionCase(DataTypes.INTEGER, () -> { + cases.add(arithmeticExceptionCase(DataType.INTEGER, () -> { List data = randomList(1, 10, () -> randomIntBetween(Integer.MIN_VALUE, 0)); data.add(Integer.MIN_VALUE); return data; })); - cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + cases.add(arithmeticExceptionCase(DataType.LONG, () -> { List data = randomList(1, 10, () -> randomLongBetween(0L, Long.MAX_VALUE)); data.add(Long.MAX_VALUE); return data; })); - cases.add(arithmeticExceptionCase(DataTypes.LONG, () -> { + cases.add(arithmeticExceptionCase(DataType.LONG, () -> { List data = randomList(1, 10, () -> randomLongBetween(Long.MIN_VALUE, 0L)); data.add(Long.MIN_VALUE); return data; })); - cases.add(arithmeticExceptionCase(DataTypes.UNSIGNED_LONG, () -> { + cases.add(arithmeticExceptionCase(DataType.UNSIGNED_LONG, () -> { List data = randomList(1, 10, ESTestCase::randomLong); data.add(asLongUnsigned(UNSIGNED_LONG_MAX)); return data; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java index 8d0e2a64d1643..30fe420f29960 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java @@ -15,9 +15,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import java.util.ArrayList; import java.util.List; @@ -26,59 +25,79 @@ import static java.lang.Math.max; import static org.hamcrest.Matchers.equalTo; -public class MvZipTests extends AbstractScalarFunctionTestCase { +public class MvZipTests extends AbstractFunctionTestCase { public MvZipTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { + // Note that a null argument does not automatically make the result null, so we explicitly test with nulls List suppliers = new ArrayList<>(); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { - List left = randomList(1, 3, () -> randomLiteral(DataTypes.KEYWORD).value()); - List right = randomList(1, 3, () -> randomLiteral(DataTypes.KEYWORD).value()); - String delim = randomAlphaOfLengthBetween(1, 1); + for (DataType leftType : DataType.types()) { + if (leftType != DataType.NULL && DataType.isString(leftType) == false) { + continue; + } + for (DataType rightType : DataType.types()) { + if (rightType != DataType.NULL && DataType.isString(rightType) == false) { + continue; + } + for (DataType delimType : DataType.types()) { + if (delimType != DataType.NULL && DataType.isString(delimType) == false) { + continue; + } + suppliers.add(supplier(leftType, rightType, delimType)); + } + suppliers.add(supplier(leftType, rightType)); + } + } + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); + } + + private static TestCaseSupplier supplier(DataType leftType, DataType rightType, DataType delimType) { + return new TestCaseSupplier(List.of(leftType, rightType, delimType), () -> { + List left = randomList(leftType); + List right = randomList(rightType); + BytesRef delim = delimType == DataType.NULL ? null : new BytesRef(randomAlphaOfLength(1)); + List expected = calculateExpected(left, right, delim); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(left, DataTypes.KEYWORD, "mvLeft"), - new TestCaseSupplier.TypedData(right, DataTypes.KEYWORD, "mvRight"), - new TestCaseSupplier.TypedData(delim, DataTypes.KEYWORD, "delim") + new TestCaseSupplier.TypedData(left, leftType, "mvLeft"), + new TestCaseSupplier.TypedData(right, rightType, "mvRight"), + new TestCaseSupplier.TypedData(delim, delimType, "delim") ), "MvZipEvaluator[leftField=Attribute[channel=0], rightField=Attribute[channel=1], delim=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(expected.size() == 1 ? expected.iterator().next() : expected) + DataType.KEYWORD, + equalTo(expected == null ? null : expected.size() == 1 ?
expected.iterator().next() : expected) ); - })); + }); + } - suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT, DataTypes.TEXT, DataTypes.TEXT), () -> { - List left = randomList(1, 10, () -> randomLiteral(DataTypes.TEXT).value()); - List right = randomList(1, 10, () -> randomLiteral(DataTypes.TEXT).value()); - String delim = randomAlphaOfLengthBetween(1, 1); - List expected = calculateExpected(left, right, delim); + private static TestCaseSupplier supplier(DataType leftType, DataType rightType) { + return new TestCaseSupplier(List.of(leftType, rightType), () -> { + List left = randomList(leftType); + List right = randomList(rightType); + + List expected = calculateExpected(left, right, new BytesRef(",")); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(left, DataTypes.TEXT, "mvLeft"), - new TestCaseSupplier.TypedData(right, DataTypes.TEXT, "mvRight"), - new TestCaseSupplier.TypedData(delim, DataTypes.TEXT, "delim") + new TestCaseSupplier.TypedData(left, leftType, "mvLeft"), + new TestCaseSupplier.TypedData(right, rightType, "mvRight") ), - "MvZipEvaluator[leftField=Attribute[channel=0], rightField=Attribute[channel=1], delim=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(expected.size() == 1 ? expected.iterator().next() : expected) + "MvZipEvaluator[leftField=Attribute[channel=0], rightField=Attribute[channel=1], delim=LiteralsEvaluator[lit=,]]", + DataType.KEYWORD, + equalTo(expected == null ? null : expected.size() == 1 ? expected.iterator().next() : expected) ); - })); - - return parameterSuppliersFromTypedData(suppliers); + }); } - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.KEYWORD; - } - - @Override - protected List argSpec() { - return List.of(required(strings()), required(strings()), optional(strings())); + private static List randomList(DataType type) { + if (type == DataType.NULL) { + return null; + } + return randomList(1, 3, () -> new BytesRef(randomAlphaOfLength(5))); } @Override @@ -86,27 +105,36 @@ protected Expression build(Source source, List args) { return new MvZip(source, args.get(0), args.get(1), args.size() > 2 ? 
args.get(2) : null); } - private static List calculateExpected(List left, List right, String delim) { + private static List calculateExpected(List left, List right, BytesRef delim) { + if (delim == null) { + return null; + } + if (left == null) { + return right; + } + if (right == null) { + return left; + } List expected = new ArrayList<>(max(left.size(), right.size())); int i = 0, j = 0; while (i < left.size() && j < right.size()) { BytesRefBuilder work = new BytesRefBuilder(); - work.append((BytesRef) left.get(i)); - work.append(new BytesRef(delim)); - work.append((BytesRef) right.get(j)); + work.append(left.get(i)); + work.append(delim); + work.append(right.get(j)); expected.add(work.get()); i++; j++; } while (i < left.size()) { BytesRefBuilder work = new BytesRefBuilder(); - work.append((BytesRef) left.get(i)); + work.append(left.get(i)); expected.add(work.get()); i++; } while (j < right.size()) { BytesRefBuilder work = new BytesRefBuilder(); - work.append((BytesRef) right.get(j)); + work.append(right.get(j)); expected.add(work.get()); j++; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index e884ba9400206..42022099ceace 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; @@ -59,29 +58,44 @@ public static Iterable parameters() { builder.expectBoolean(booleans -> booleans.filter(v -> v != null).findFirst()); suppliers.addAll(builder.suppliers()); addSpatialCombinations(suppliers); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.IP, DataTypes.IP), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.IP), () -> { var first = randomBoolean() ? null : EsqlDataTypeConverter.stringToIP(NetworkAddress.format(randomIp(true))); var second = EsqlDataTypeConverter.stringToIP(NetworkAddress.format(randomIp(true))); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(first, DataTypes.IP, "first"), - new TestCaseSupplier.TypedData(second, DataTypes.IP, "second") + new TestCaseSupplier.TypedData(first, DataType.IP, "first"), + new TestCaseSupplier.TypedData(second, DataType.IP, "second") ), "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", - DataTypes.IP, + DataType.IP, equalTo(first == null ? second : first) ); })); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.DATETIME, DataTypes.DATETIME), () -> { + suppliers.add(new TestCaseSupplier(List.of(DataType.VERSION, DataType.VERSION), () -> { + var first = randomBoolean() + ? null + : EsqlDataTypeConverter.stringToVersion(randomInt(10) + "." + randomInt(10) + "." + randomInt(10)); + var second = EsqlDataTypeConverter.stringToVersion(randomInt(10) + "." + randomInt(10) + "." 
+ randomInt(10)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(first, DataType.VERSION, "first"), + new TestCaseSupplier.TypedData(second, DataType.VERSION, "second") + ), + "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + DataType.VERSION, + equalTo(first == null ? second : first) + ); + })); + suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.DATETIME), () -> { Long firstDate = randomBoolean() ? null : ZonedDateTime.parse("2023-12-04T10:15:30Z").toInstant().toEpochMilli(); Long secondDate = ZonedDateTime.parse("2023-12-05T10:45:00Z").toInstant().toEpochMilli(); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(firstDate, DataTypes.DATETIME, "first"), - new TestCaseSupplier.TypedData(secondDate, DataTypes.DATETIME, "second") + new TestCaseSupplier.TypedData(firstDate, DataType.DATETIME, "first"), + new TestCaseSupplier.TypedData(secondDate, DataType.DATETIME, "second") ), "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(firstDate == null ? secondDate : firstDate) ); })); @@ -90,7 +104,7 @@ public static Iterable parameters() { } protected static void addSpatialCombinations(List suppliers) { - for (DataType dataType : List.of(DataTypes.GEO_POINT, DataTypes.GEO_SHAPE, DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE)) { + for (DataType dataType : List.of(DataType.GEO_POINT, DataType.GEO_SHAPE, DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE)) { TestCaseSupplier.TypedDataSupplier leftDataSupplier = SpatialRelatesFunctionTestCase.testCaseSupplier(dataType); TestCaseSupplier.TypedDataSupplier rightDataSupplier = SpatialRelatesFunctionTestCase.testCaseSupplier(dataType); suppliers.add( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java index b593ad4d3aed4..299b66433dcd0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -36,11 +35,11 @@ public IsNotNullTests(@Name("TestCase") Supplier test @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - for (DataType type : DataTypes.types()) { + for (DataType type : DataType.types()) { if (false == EsqlDataTypes.isRepresentable(type)) { continue; } - if (type != DataTypes.NULL) { + if (type != DataType.NULL) { suppliers.add( new TestCaseSupplier( "non-null " + type.typeName(), @@ -48,7 +47,7 @@ public static Iterable parameters() { () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "v")), "IsNotNullEvaluator[field=Attribute[channel=0]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(true) ) ) @@ -61,7 
+60,7 @@ public static Iterable parameters() { () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(null, type, "v")), "IsNotNullEvaluator[field=Attribute[channel=0]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(false) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java index adeca03406ead..606e9598bda63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -36,11 +35,11 @@ public IsNullTests(@Name("TestCase") Supplier testCas @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - for (DataType type : DataTypes.types()) { + for (DataType type : DataType.types()) { if (false == EsqlDataTypes.isRepresentable(type)) { continue; } - if (type != DataTypes.NULL) { + if (type != DataType.NULL) { suppliers.add( new TestCaseSupplier( "non-null " + type.typeName(), @@ -48,7 +47,7 @@ public static Iterable parameters() { () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "v")), "IsNullEvaluator[field=Attribute[channel=0]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(false) ) ) @@ -61,7 +60,7 @@ public static Iterable parameters() { () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(null, type, "v")), "IsNullEvaluator[field=Attribute[channel=0]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(true) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java index b29a952f4c792..d65ad5a2b961c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -30,9 +29,9 @@ public SpatialContainsTests(@Name("TestCase") Supplier parameters() { List suppliers = new ArrayList<>(); - DataType[] geoDataTypes = { DataTypes.GEO_POINT, DataTypes.GEO_SHAPE }; + DataType[] geoDataTypes = { DataType.GEO_POINT, DataType.GEO_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); - DataType[] 
cartesianDataTypes = { DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE }; + DataType[] cartesianDataTypes = { DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialContainsTests::typeErrorMessage) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java index 999fc695fd8fe..b3feac5619c16 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -30,9 +29,9 @@ public SpatialDisjointTests(@Name("TestCase") Supplier parameters() { List suppliers = new ArrayList<>(); - DataType[] geoDataTypes = { DataTypes.GEO_POINT, DataTypes.GEO_SHAPE }; + DataType[] geoDataTypes = { DataType.GEO_POINT, DataType.GEO_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); - DataType[] cartesianDataTypes = { DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE }; + DataType[] cartesianDataTypes = { DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialDisjointTests::typeErrorMessage) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java index 19d43f42a9555..ccf94bf6d2760 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -30,9 +29,9 @@ public SpatialIntersectsTests(@Name("TestCase") Supplier parameters() { List suppliers = new ArrayList<>(); - DataType[] geoDataTypes = { DataTypes.GEO_POINT, DataTypes.GEO_SHAPE }; + DataType[] geoDataTypes = { DataType.GEO_POINT, DataType.GEO_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); - DataType[] cartesianDataTypes = { DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE }; + DataType[] cartesianDataTypes = { 
DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java index c689adfe50b29..9929971c48613 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -65,7 +64,7 @@ protected static void addSpatialCombinations(List suppliers, D leftDataSupplier, rightDataSupplier, SpatialRelatesFunctionTestCase::spatialEvaluatorString, - DataTypes.BOOLEAN, + DataType.BOOLEAN, (l, r) -> expected(l, leftType, r, rightType) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java index 3dde6a1f83a3f..5f48cfcd6d701 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -30,9 +29,9 @@ public SpatialWithinTests(@Name("TestCase") Supplier @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - DataType[] geoDataTypes = { DataTypes.GEO_POINT, DataTypes.GEO_SHAPE }; + DataType[] geoDataTypes = { DataType.GEO_POINT, DataType.GEO_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); - DataType[] cartesianDataTypes = { DataTypes.CARTESIAN_POINT, DataTypes.CARTESIAN_SHAPE }; + DataType[] cartesianDataTypes = { DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialWithinTests::typeErrorMessage) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java index 012ee982d3da6..b466ffe1e92f1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @FunctionName("st_x") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java index 776a1a461b10f..1f3639bf1ecb4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @FunctionName("st_y") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index 9c8856280aa87..f46ae25fddfc7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -50,18 +49,18 @@ public static Iterable parameters() { for (int length = 4; length < 100; length++) { suppliers(suppliers, length); } - Set supported = Set.of(DataTypes.NULL, DataTypes.KEYWORD, DataTypes.TEXT); + Set supported = Set.of(DataType.NULL, DataType.KEYWORD, DataType.TEXT); List<Set<DataType>> supportedPerPosition = List.of(supported, supported); - for (DataType lhs : DataTypes.types()) { - if (lhs == DataTypes.NULL || EsqlDataTypes.isRepresentable(lhs) == false) { + for (DataType lhs : DataType.types()) { + if (lhs == DataType.NULL || EsqlDataTypes.isRepresentable(lhs) == false) { continue; } - for (DataType rhs : DataTypes.types()) { - if (rhs == DataTypes.NULL || EsqlDataTypes.isRepresentable(rhs) == false) { + for (DataType rhs : DataType.types()) { + if (rhs == DataType.NULL || EsqlDataTypes.isRepresentable(rhs) == false) { continue; } - boolean lhsIsString = lhs == DataTypes.KEYWORD || lhs == DataTypes.TEXT; - boolean rhsIsString = rhs == DataTypes.KEYWORD || rhs == DataTypes.TEXT; +
boolean lhsIsString = lhs == DataType.KEYWORD || lhs == DataType.TEXT; + boolean rhsIsString = rhs == DataType.KEYWORD || rhs == DataType.TEXT; if (lhsIsString && rhsIsString) { continue; } @@ -74,8 +73,8 @@ public static Iterable parameters() { private static void suppliers(List suppliers, int length) { if (length > 3) { - suppliers.add(supplier("ascii", DataTypes.KEYWORD, length, () -> randomAlphaOfLengthBetween(1, 10))); - suppliers.add(supplier("unicode", DataTypes.TEXT, length, () -> randomRealisticUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("ascii", DataType.KEYWORD, length, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("unicode", DataType.TEXT, length, () -> randomRealisticUnicodeOfLengthBetween(1, 10))); } else { add(suppliers, "ascii", length, () -> randomAlphaOfLengthBetween(1, 10)); add(suppliers, "unicode", length, () -> randomRealisticUnicodeOfLengthBetween(1, 10)); @@ -97,14 +96,14 @@ private static TestCaseSupplier supplier(String name, DataType type, int length, expectedToString += "Attribute[channel=" + v + "]"; } expectedToString += "]]"; - return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.KEYWORD, equalTo(new BytesRef(expectedValue))); + return new TestCaseSupplier.TestCase(values, expectedToString, DataType.KEYWORD, equalTo(new BytesRef(expectedValue))); }); } private static void add(List suppliers, String name, int length, Supplier valueSupplier) { Map<Integer, List<List<DataType>>> permutations = new HashMap<Integer, List<List<DataType>>>(); - List supportedDataTypes = List.of(DataTypes.KEYWORD, DataTypes.TEXT); - permutations.put(0, List.of(List.of(DataTypes.KEYWORD), List.of(DataTypes.TEXT))); + List supportedDataTypes = List.of(DataType.KEYWORD, DataType.TEXT); + permutations.put(0, List.of(List.of(DataType.KEYWORD), List.of(DataType.TEXT))); for (int v = 0; v < length - 1; v++) { List<List<DataType>> current = permutations.get(v); List<List<DataType>> next = new ArrayList<>(); @@ -133,7 +132,7 @@ private static void add(List suppliers, String name, int lengt expectedToString += "Attribute[channel=" + v + "]"; } expectedToString += "]]"; - return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.KEYWORD, equalTo(new BytesRef(expectedValue))); + return new TestCaseSupplier.TestCase(values, expectedToString, DataType.KEYWORD, equalTo(new BytesRef(expectedValue))); })); } @@ -189,7 +188,7 @@ public void testSomeConstant() { private void testOversized(int totalLen, List mix, List fieldValues) { for (int len; totalLen < Concat.MAX_CONCAT_LENGTH; totalLen += len) { len = randomIntBetween(1, (int) Concat.MAX_CONCAT_LENGTH); - mix.add(new Literal(Source.EMPTY, new BytesRef(randomAlphaOfLength(len)), DataTypes.KEYWORD)); + mix.add(new Literal(Source.EMPTY, new BytesRef(randomAlphaOfLength(len)), DataType.KEYWORD)); } Expression expression = build(testCase.getSource(), mix); Exception e = expectThrows(EsqlClientException.class, () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index 6443d739e64c5..863243a352bb0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import
org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -37,11 +37,11 @@ public static Iterable parameters() { String suffix = ""; return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.KEYWORD, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.KEYWORD, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); @@ -50,11 +50,11 @@ public static Iterable parameters() { String suffix = randomAlphaOfLength(5); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.KEYWORD, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.KEYWORD, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); @@ -65,11 +65,11 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.KEYWORD, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.KEYWORD, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); @@ -80,11 +80,11 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.KEYWORD, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.KEYWORD, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); @@ -95,11 +95,11 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.KEYWORD, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.KEYWORD, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); @@ -110,11 +110,11 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), - new 
TestCaseSupplier.TypedData(new BytesRef(suffix), DataTypes.TEXT, "suffix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(suffix), DataType.TEXT, "suffix") ), "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.endsWith(suffix)) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 291b0c5d6ce3c..e6a5d30d0fa53 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -37,132 +37,132 @@ public LeftTests(@Name("TestCase") Supplier testCaseS public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(new TestCaseSupplier("empty string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("empty string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { int length = between(-64, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(""), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(""), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(unicodeLeftSubstring(text, length))) ); })); - suppliers.add(new TestCaseSupplier("ascii longer than string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii longer than string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(text.length(), 128); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, 
"str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(text)) ); })); - suppliers.add(new TestCaseSupplier("ascii zero length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii zero length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(0, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(0, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii negative length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii negative length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(-128, -1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("unicode", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(unicodeLeftSubstring(text, length))) ); })); - suppliers.add(new TestCaseSupplier("unicode longer than string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode longer than string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(text.length(), 128); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + 
DataType.KEYWORD, equalTo(new BytesRef(text)) ); })); - suppliers.add(new TestCaseSupplier("unicode zero length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode zero length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(0, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(0, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("unicode negative length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode negative length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(-128, -1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii as text input", List.of(DataTypes.TEXT, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii as text input", List.of(DataType.TEXT, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "LeftEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(unicodeLeftSubstring(text, length))) ); })); @@ -201,7 +201,7 @@ public void testUnicode() { private String process(String str, int length) { try ( EvalOperator.ExpressionEvaluator eval = evaluator( - new Left(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, DataTypes.INTEGER)) + new Left(Source.EMPTY, field("str", DataType.KEYWORD), new Literal(Source.EMPTY, length, DataType.INTEGER)) ).get(driverContext()); Block block = eval.eval(row(List.of(new BytesRef(str)))) ) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java index 98ff38b48a7cf..81fcc118ade05 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.util.UnicodeUtil; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,12 +33,12 @@ public LengthTests(@Name("TestCase") Supplier testCas @ParametersFactory public static Iterable parameters() { List cases = new ArrayList<>(); - cases.addAll(List.of(new TestCaseSupplier("length basic test", List.of(DataTypes.KEYWORD), () -> { + cases.addAll(List.of(new TestCaseSupplier("length basic test", List.of(DataType.KEYWORD), () -> { BytesRef value = new BytesRef(randomAlphaOfLength(between(0, 10000))); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(value, DataTypes.KEYWORD, "f")), + List.of(new TestCaseSupplier.TypedData(value, DataType.KEYWORD, "f")), "LengthEvaluator[val=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(UnicodeUtil.codePointCount(value)) ); }))); @@ -56,21 +56,21 @@ private static List makeTestCases(String title, Supplier new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.KEYWORD, "f")), + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataType.KEYWORD, "f")), "LengthEvaluator[val=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(expectedLength) ) ), new TestCaseSupplier( title + " with text", - List.of(DataTypes.TEXT), + List.of(DataType.TEXT), () -> new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataTypes.TEXT, "f")), + List.of(new TestCaseSupplier.TypedData(new BytesRef(text.get()), DataType.TEXT, "f")), "LengthEvaluator[val=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, equalTo(expectedLength) ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index fff74b8af7a15..011252a3f7e14 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -36,7 +35,7 @@ public LocateTests(@Name("TestCase") Supplier testCas this.testCase = testCaseSupplier.get(); } - private static final DataType[] STRING_TYPES = new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }; + private static final DataType[] STRING_TYPES = new DataType[] { DataType.KEYWORD, DataType.TEXT }; @ParametersFactory public static Iterable parameters() { @@ -147,8 +146,8 @@ private static TestCaseSupplier supplier(String str, String substr, @Nullable In return new TestCaseSupplier( name, - types(DataTypes.KEYWORD, DataTypes.KEYWORD, start != null), - () -> testCase(DataTypes.KEYWORD, 
DataTypes.KEYWORD, str, substr, start, expectedValue) + types(DataType.KEYWORD, DataType.KEYWORD, start != null), + () -> testCase(DataType.KEYWORD, DataType.KEYWORD, str, substr, start, expectedValue) ); } @@ -186,7 +185,7 @@ private static List types(DataType firstType, DataType secondType, boo types.add(firstType); types.add(secondType); if (hasStart) { - types.add(DataTypes.INTEGER); + types.add(DataType.INTEGER); } return types; } @@ -203,8 +202,8 @@ private static TestCaseSupplier.TestCase testCase( values.add(new TestCaseSupplier.TypedData(str == null ? null : new BytesRef(str), strType, "str")); values.add(new TestCaseSupplier.TypedData(substr == null ? null : new BytesRef(substr), substrType, "substr")); if (start != null) { - values.add(new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start")); + values.add(new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start")); } - return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataTypes.INTEGER, equalTo(expectedValue)); + return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataType.INTEGER, equalTo(expectedValue)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 57883e0b84067..e1bcc519840be 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -51,15 +50,15 @@ static Iterable parameters(Function escapeString, Supp cases.add( new TestCaseSupplier( "null", - List.of(DataTypes.NULL, DataTypes.KEYWORD, DataTypes.BOOLEAN), + List.of(DataType.NULL, DataType.KEYWORD, DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(null, DataTypes.NULL, "e"), - new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern").forceLiteral(), - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + new TestCaseSupplier.TypedData(null, DataType.NULL, "e"), + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataType.KEYWORD, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "caseInsensitive").forceLiteral() ), "LiteralsEvaluator[lit=null]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, nullValue() ) ) @@ -70,8 +69,8 @@ static Iterable parameters(Function escapeString, Supp casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, escapeString, optionalPattern); casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, escapeString, optionalPattern); casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, escapeString, optionalPattern); - for (DataType type : DataTypes.types()) { - if (type == DataTypes.KEYWORD || 
type == DataTypes.TEXT || type == DataTypes.NULL) { + for (DataType type : DataType.types()) { + if (type == DataType.KEYWORD || type == DataType.TEXT || type == DataType.NULL) { continue; } if (EsqlDataTypes.isRepresentable(type) == false) { @@ -79,13 +78,13 @@ static Iterable parameters(Function escapeString, Supp } cases.add( new TestCaseSupplier( - List.of(type, DataTypes.KEYWORD, DataTypes.BOOLEAN), + List.of(type, DataType.KEYWORD, DataType.BOOLEAN), () -> TestCaseSupplier.TestCase.typeError( List.of( new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "e"), - new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern") + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataType.KEYWORD, "pattern") .forceLiteral(), - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "caseInsensitive").forceLiteral() ), "argument of [] must be [string], found value [e] type [" + type.typeName() + "]" ) @@ -127,17 +126,17 @@ private static void casesForString( } private static void cases(List cases, String title, Supplier textAndPattern, boolean expected) { - for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }) { - cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, type, DataTypes.BOOLEAN), () -> { + for (DataType type : new DataType[] { DataType.KEYWORD, DataType.TEXT }) { + cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, type, DataType.BOOLEAN), () -> { TextAndPattern v = textAndPattern.get(); return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(new BytesRef(v.text), type, "e"), new TestCaseSupplier.TypedData(new BytesRef(v.pattern), type, "pattern").forceLiteral(), - new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "caseInsensitive").forceLiteral() ), startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(expected) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java new file mode 100644 index 0000000000000..dc266066bd424 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.string;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BlockUtils;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.esql.TestBlockFactory;
+import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.core.type.EsField;
+import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase;
+import org.junit.After;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * These tests create rows that are 1MB in size. Test classes
+ * which extend AbstractFunctionTestCase rerun test cases with
+ * many randomized inputs. Unfortunately, tests are run with
+ * limited memory, and instantiating many copies of these
+ * tests with large rows causes out-of-memory failures.
+ */
+public class RepeatStaticTests extends ESTestCase {
+
+    public void testAlmostTooBig() {
+        String str = randomAlphaOfLength(1);
+        int number = (int) Repeat.MAX_REPEATED_LENGTH;
+        String repeated = process(str, number);
+        assertThat(repeated, equalTo(str.repeat(number)));
+    }
+
+    public void testTooBig() {
+        String str = randomAlphaOfLength(1);
+        int number = (int) Repeat.MAX_REPEATED_LENGTH + 1;
+        String repeated = process(str, number);
+        assertNull(repeated);
+        assertWarnings(
+            "Line -1:-1: java.lang.IllegalArgumentException: Creating repeated strings with more than [1048576] bytes is not supported",
+            "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded."
+        );
+    }
+
+    public String process(String str, int number) {
+        try (
+            var eval = AbstractFunctionTestCase.evaluator(
+                new Repeat(Source.EMPTY, field("string", DataType.KEYWORD), field("number", DataType.INTEGER))
+            ).get(driverContext());
+            Block block = eval.eval(row(List.of(new BytesRef(str), number)));
+        ) {
+            return block.isNull(0) ? null : ((BytesRef) BlockUtils.toJavaObject(block, 0)).utf8ToString();
+        }
+    }
+
+    /**
+     * The following fields and methods were borrowed from AbstractFunctionTestCase
+     */
+    private final List<CircuitBreaker> breakers = Collections.synchronizedList(new ArrayList<>());
+
+    private static Page row(List<Object> values) {
+        return new Page(1, BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values));
+    }
+
+    private static FieldAttribute field(String name, DataType type) {
+        return new FieldAttribute(Source.synthetic(name), name, new EsField(name, type, Map.of(), true));
+    }
+
+    private DriverContext driverContext() {
+        BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking();
+        CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
+        breakers.add(breaker);
+        return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays));
+    }
+
+    @After
+    public void allMemoryReleased() {
+        for (CircuitBreaker breaker : breakers) {
+            assertThat(breaker.getUsed(), equalTo(0L));
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java
new file mode 100644
index 0000000000000..cb89dc168b928
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.string;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Supplier;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class RepeatTests extends AbstractFunctionTestCase {
+    public RepeatTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
+        this.testCase = testCaseSupplier.get();
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+
+        List<TestCaseSupplier> cases = new ArrayList<>();
+
+        cases.add(new TestCaseSupplier("Repeat basic test", List.of(DataType.KEYWORD, DataType.INTEGER), () -> {
+            String text = randomAlphaOfLength(10);
+            int number = between(0, 10);
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"),
+                    new TestCaseSupplier.TypedData(number, DataType.INTEGER, "number")
+                ),
+                "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]",
+                DataType.KEYWORD,
+                equalTo(new BytesRef(text.repeat(number)))
+            );
+        }));
+
+        cases.add(new TestCaseSupplier("Repeat basic test with text input", List.of(DataType.TEXT, DataType.INTEGER), () -> {
+            String text = randomAlphaOfLength(10);
+            int number = between(0, 10);
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.TEXT, "str"),
+                    new TestCaseSupplier.TypedData(number, DataType.INTEGER, "number")
+                ),
+                "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]",
+                DataType.KEYWORD,
+                equalTo(new BytesRef(text.repeat(number)))
+            );
+        }));
+
+        cases.add(new TestCaseSupplier("Repeat with number zero", List.of(DataType.KEYWORD, DataType.INTEGER), () -> {
+            String text = randomAlphaOfLength(10);
+            int number = 0;
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"),
+                    new TestCaseSupplier.TypedData(number, DataType.INTEGER, "number")
+                ),
+                "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]",
+                DataType.KEYWORD,
+                equalTo(new BytesRef(""))
+            );
+        }));
+
+        cases.add(new TestCaseSupplier("Repeat Unicode", List.of(DataType.KEYWORD, DataType.INTEGER), () -> {
+            String text = randomUnicodeOfLength(10);
+            int number = randomIntBetween(0, 10);
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"),
+                    new TestCaseSupplier.TypedData(number, DataType.INTEGER, "number")
+                ),
+                "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]",
+                DataType.KEYWORD,
+                equalTo(new BytesRef(text.repeat(number)))
+            );
+        }));
+
+        cases.add(new TestCaseSupplier("Repeat Negative Number", List.of(DataType.KEYWORD, DataType.INTEGER), () -> {
+            String text = randomAlphaOfLength(10);
+            int number = randomIntBetween(-10, -1);
+            return new TestCaseSupplier.TestCase(
+                List.of(
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"),
+                    new TestCaseSupplier.TypedData(number, DataType.INTEGER, "number")
+                ),
+                "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]",
+                DataType.KEYWORD,
+                nullValue()
+            ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.")
+                .withWarning("Line -1:-1: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [" + number + "]")
+                .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]");
+        }));
+
+        cases = anyNullIsNull(true, cases);
+        cases = errorsForCasesWithoutExamples(cases);
+        return parameterSuppliersFromTypedData(cases);
+    }
+
+    @Override
+    protected Expression build(Source source, List<Expression> args) {
+        return new Repeat(source, args.get(0), args.get(1));
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java
index f8cee3c2dd42f..bfadf66f7f5cc 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java
@@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.core.type.DataTypes;
 import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase;
 import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
@@ -33,16 +32,16 @@ public ReplaceTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCa
     @ParametersFactory
     public static Iterable<Object[]> parameters() {
         List<TestCaseSupplier> suppliers = new ArrayList<>();
-        for (DataType strType : DataTypes.types()) {
-            if (DataTypes.isString(strType) == false) {
+        for (DataType strType : DataType.types()) {
+            if (DataType.isString(strType) == false) {
                 continue;
             }
-            for (DataType oldStrType : DataTypes.types()) {
-                if (DataTypes.isString(oldStrType) == false) {
+            for (DataType oldStrType : DataType.types()) {
+                if (DataType.isString(oldStrType) == false) {
                     continue;
                 }
-                for (DataType newStrType : DataTypes.types()) {
-                    if (DataTypes.isString(newStrType) == false) {
+                for (DataType newStrType : DataType.types()) {
+                    if (DataType.isString(newStrType) == false) {
                         continue;
                     }
                     suppliers.add(new TestCaseSupplier(List.of(strType, oldStrType, newStrType), () -> {
@@ -79,18 +78,18 @@ public static Iterable<Object[]> parameters() {
             )
         );
-        suppliers.add(new TestCaseSupplier("syntax error", List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> {
+        suppliers.add(new TestCaseSupplier("syntax error", List.of(DataType.KEYWORD, DataType.KEYWORD, DataType.KEYWORD), () -> {
             String text = randomAlphaOfLength(10);
             String invalidRegex = "[";
             String newStr = randomAlphaOfLength(5);
             return new TestCaseSupplier.TestCase(
                 List.of(
-                    new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"),
-                    new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataTypes.KEYWORD, "oldStr"),
-                    new TestCaseSupplier.TypedData(new BytesRef(newStr), DataTypes.KEYWORD, "newStr")
+                    new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"),
+                    new TestCaseSupplier.TypedData(new BytesRef(invalidRegex), DataType.KEYWORD, "oldStr"),
+                    new TestCaseSupplier.TypedData(new
BytesRef(newStr), DataType.KEYWORD, "newStr") ), "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(null) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning( @@ -110,8 +109,8 @@ public static Iterable parameters() { private static TestCaseSupplier fixedCase(String name, String str, String oldStr, String newStr, String result) { return new TestCaseSupplier( name, - List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), - () -> testCase(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD, str, oldStr, newStr, result) + List.of(DataType.KEYWORD, DataType.KEYWORD, DataType.KEYWORD), + () -> testCase(DataType.KEYWORD, DataType.KEYWORD, DataType.KEYWORD, str, oldStr, newStr, result) ); } @@ -131,7 +130,7 @@ private static TestCaseSupplier.TestCase testCase( new TestCaseSupplier.TypedData(new BytesRef(newStr), newStrType, "newStr") ), "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(result)) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index a925410197aee..599ab51995217 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -37,132 +37,132 @@ public RightTests(@Name("TestCase") Supplier testCase public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(new TestCaseSupplier("empty string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("empty string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { int length = between(-64, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(""), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(""), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new 
TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(unicodeRightSubstring(text, length))) ); })); - suppliers.add(new TestCaseSupplier("ascii longer than string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii longer than string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(text.length(), 128); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(text)) ); })); - suppliers.add(new TestCaseSupplier("ascii zero length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii zero length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(0, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(0, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii negative length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii negative length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(-128, -1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("unicode", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, 
equalTo(new BytesRef(unicodeRightSubstring(text, length))) ); })); - suppliers.add(new TestCaseSupplier("unicode longer than string", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode longer than string", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(text.length(), 128); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(text)) ); })); - suppliers.add(new TestCaseSupplier("unicode zero length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode zero length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(0, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(0, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("unicode negative length", List.of(DataTypes.KEYWORD, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("unicode negative length", List.of(DataType.KEYWORD, DataType.INTEGER), () -> { String text = randomUnicodeOfLengthBetween(1, 64); int length = between(-128, -1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef("")) ); })); - suppliers.add(new TestCaseSupplier("ascii as text", List.of(DataTypes.TEXT, DataTypes.INTEGER), () -> { + suppliers.add(new TestCaseSupplier("ascii as text", List.of(DataType.TEXT, DataType.INTEGER), () -> { String text = randomAlphaOfLengthBetween(1, 64); int length = between(1, text.length()); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "length") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "length") ), "RightEvaluator[str=Attribute[channel=0], length=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(unicodeRightSubstring(text, length))) ); })); @@ -202,7 +202,7 @@ public void testUnicode() { private String process(String str, int length) { try ( EvalOperator.ExpressionEvaluator eval = evaluator( - new Right(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, length, 
DataTypes.INTEGER)) + new Right(Source.EMPTY, field("str", DataType.KEYWORD), new Literal(Source.EMPTY, length, DataType.INTEGER)) ).get(driverContext()); Block block = eval.eval(row(List.of(new BytesRef(str)))) ) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index d809c8ebc9f53..47e48df90007e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -43,7 +42,7 @@ public SplitTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - List supportedDataTyes = List.of(DataTypes.KEYWORD, DataTypes.TEXT); + List supportedDataTyes = List.of(DataType.KEYWORD, DataType.TEXT); for (DataType sType : supportedDataTyes) { for (DataType dType : supportedDataTyes) { suppliers.add(new TestCaseSupplier("split test " + sType.toString() + " " + dType.toString(), List.of(sType, dType), () -> { @@ -59,7 +58,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(new BytesRef(delimiter), dType, "delim") ), "SplitVariableEvaluator[str=Attribute[channel=0], delim=Attribute[channel=1]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(strings.size() == 1 ? 
strings.get(0) : strings) ); })); @@ -77,7 +76,7 @@ public void testConstantDelimiter() { DriverContext driverContext = driverContext(); try ( EvalOperator.ExpressionEvaluator eval = evaluator( - new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) + new Split(Source.EMPTY, field("str", DataType.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataType.KEYWORD)) ).get(driverContext) ) { /* @@ -105,8 +104,8 @@ public void testTooLongConstantDelimiter() { () -> evaluator( new Split( Source.EMPTY, - field("str", DataTypes.KEYWORD), - new Literal(Source.EMPTY, new BytesRef(delimiter), DataTypes.KEYWORD) + field("str", DataType.KEYWORD), + new Literal(Source.EMPTY, new BytesRef(delimiter), DataType.KEYWORD) ) ).get(driverContext) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index 8b98cbd0f8ca7..f0c51a9b22e55 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -38,11 +38,11 @@ public static Iterable parameters() { } return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(new BytesRef(prefix), DataTypes.KEYWORD, "prefix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(new BytesRef(prefix), DataType.KEYWORD, "prefix") ), "StartsWithEvaluator[str=Attribute[channel=0], prefix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.startsWith(prefix)) ); }), new TestCaseSupplier("Starts with basic test with text args", () -> { @@ -53,11 +53,11 @@ public static Iterable parameters() { } return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(str), DataTypes.TEXT, "str"), - new TestCaseSupplier.TypedData(new BytesRef(prefix), DataTypes.TEXT, "prefix") + new TestCaseSupplier.TypedData(new BytesRef(str), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(new BytesRef(prefix), DataType.TEXT, "prefix") ), "StartsWithEvaluator[str=Attribute[channel=0], prefix=Attribute[channel=1]]", - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(str.startsWith(prefix)) ); }))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index c2df6f61c1d25..c1a49455d9d83 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -41,62 +41,54 @@ public static Iterable parameters() { anyNullIsNull( true, List.of( - new TestCaseSupplier( - "Substring basic test", - List.of(DataTypes.KEYWORD, DataTypes.INTEGER, DataTypes.INTEGER), - () -> { - int start = between(1, 8); - int length = between(1, 10 - start); - String text = randomAlphaOfLength(10); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.KEYWORD, "str"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "end") - ), - "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) - ); - } - ), + new TestCaseSupplier("Substring basic test", List.of(DataType.KEYWORD, DataType.INTEGER, DataType.INTEGER), () -> { + int start = between(1, 8); + int length = between(1, 10 - start); + String text = randomAlphaOfLength(10); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "end") + ), + "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", + DataType.KEYWORD, + equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) + ); + }), new TestCaseSupplier( "Substring basic test with text input", - List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.INTEGER), + List.of(DataType.TEXT, DataType.INTEGER, DataType.INTEGER), () -> { int start = between(1, 8); int length = between(1, 10 - start); String text = randomAlphaOfLength(10); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef(text), DataTypes.TEXT, "str"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "end") + new TestCaseSupplier.TypedData(new BytesRef(text), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "end") ), "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", - DataTypes.KEYWORD, + DataType.KEYWORD, equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) ); } ), - new TestCaseSupplier( - "Substring empty string", - List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.INTEGER), - () -> { - int start = between(1, 8); - int length = between(1, 10 - start); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef(""), DataTypes.TEXT, "str"), - new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start"), - new 
TestCaseSupplier.TypedData(length, DataTypes.INTEGER, "end") - ), - "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("")) - ); - } - ) + new TestCaseSupplier("Substring empty string", List.of(DataType.TEXT, DataType.INTEGER, DataType.INTEGER), () -> { + int start = between(1, 8); + int length = between(1, 10 - start); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(""), DataType.TEXT, "str"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(length, DataType.INTEGER, "end") + ), + "SubstringEvaluator[str=Attribute[channel=0], start=Attribute[channel=1], length=Attribute[channel=2]]", + DataType.KEYWORD, + equalTo(new BytesRef("")) + ); + }) ) ) ) @@ -112,7 +104,7 @@ public Matcher resultsMatcher(List typedData public void testNoLengthToString() { assertThat( - evaluator(new Substring(Source.EMPTY, field("str", DataTypes.KEYWORD), field("start", DataTypes.INTEGER), null)).get( + evaluator(new Substring(Source.EMPTY, field("str", DataType.KEYWORD), field("start", DataType.INTEGER), null)).get( driverContext() ).toString(), equalTo("SubstringNoLengthEvaluator[str=Attribute[channel=0], start=Attribute[channel=1]]") @@ -187,9 +179,9 @@ private String process(String str, int start, Integer length) { EvalOperator.ExpressionEvaluator eval = evaluator( new Substring( Source.EMPTY, - field("str", DataTypes.KEYWORD), - new Literal(Source.EMPTY, start, DataTypes.INTEGER), - length == null ? null : new Literal(Source.EMPTY, length, DataTypes.INTEGER) + field("str", DataType.KEYWORD), + new Literal(Source.EMPTY, start, DataType.INTEGER), + length == null ? null : new Literal(Source.EMPTY, length, DataType.INTEGER) ) ).get(driverContext()); Block block = eval.eval(row(List.of(new BytesRef(str)))) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java index cdc8631988638..99b2b38aa8611 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.DateUtils; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; @@ -42,10 +41,10 @@ public ToLowerTests(@Name("TestCase") Supplier testCa public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(supplier("keyword ascii", DataTypes.KEYWORD, () -> randomAlphaOfLengthBetween(1, 10))); - suppliers.add(supplier("keyword unicode", DataTypes.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); - suppliers.add(supplier("text ascii", DataTypes.TEXT, () -> randomAlphaOfLengthBetween(1, 10))); - suppliers.add(supplier("text unicode", DataTypes.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword ascii", DataType.KEYWORD, () -> 
randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword unicode", DataType.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("text ascii", DataType.TEXT, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("text unicode", DataType.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); // add null as parameter return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); @@ -54,7 +53,7 @@ public static Iterable parameters() { public void testRandomLocale() { String testString = randomAlphaOfLength(10); EsqlConfiguration cfg = randomLocaleConfig(); - ToLower func = new ToLower(Source.EMPTY, new Literal(Source.EMPTY, testString, DataTypes.KEYWORD), cfg); + ToLower func = new ToLower(Source.EMPTY, new Literal(Source.EMPTY, testString, DataType.KEYWORD), cfg); assertThat(BytesRefs.toBytesRef(testString.toLowerCase(cfg.locale())), equalTo(func.fold())); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java index b4ed8e9fbe717..7b8e6abcdb3db 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.DateUtils; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; @@ -42,10 +41,10 @@ public ToUpperTests(@Name("TestCase") Supplier testCa public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add(supplier("keyword ascii", DataTypes.KEYWORD, () -> randomAlphaOfLengthBetween(1, 10))); - suppliers.add(supplier("keyword unicode", DataTypes.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); - suppliers.add(supplier("text ascii", DataTypes.TEXT, () -> randomAlphaOfLengthBetween(1, 10))); - suppliers.add(supplier("text unicode", DataTypes.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword ascii", DataType.KEYWORD, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("keyword unicode", DataType.KEYWORD, () -> randomUnicodeOfLengthBetween(1, 10))); + suppliers.add(supplier("text ascii", DataType.TEXT, () -> randomAlphaOfLengthBetween(1, 10))); + suppliers.add(supplier("text unicode", DataType.TEXT, () -> randomUnicodeOfLengthBetween(1, 10))); // add null as parameter return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); @@ -54,7 +53,7 @@ public static Iterable parameters() { public void testRandomLocale() { String testString = randomAlphaOfLength(10); EsqlConfiguration cfg = randomLocaleConfig(); - ToUpper func = new ToUpper(Source.EMPTY, new Literal(Source.EMPTY, testString, DataTypes.KEYWORD), cfg); + ToUpper func = new ToUpper(Source.EMPTY, new Literal(Source.EMPTY, testString, DataType.KEYWORD), cfg); assertThat(BytesRefs.toBytesRef(testString.toUpperCase(cfg.locale())), equalTo(func.fold())); } 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index 1e98f10d3c7fc..3aee4a92e9570 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -55,7 +54,7 @@ public static Iterable parameters() { } private static void addCases(List suppliers) { - for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }) { + for (DataType type : new DataType[] { DataType.KEYWORD, DataType.TEXT }) { suppliers.add(new TestCaseSupplier(" with " + type.esType(), List.of(type, type), () -> { BytesRef str = new BytesRef(randomAlphaOfLength(5)); String patternString = randomAlphaOfLength(2); @@ -67,7 +66,7 @@ private static void addCases(List suppliers) { new TestCaseSupplier.TypedData(pattern, type, "pattern").forceLiteral() ), startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), - DataTypes.BOOLEAN, + DataType.BOOLEAN, equalTo(match) ); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java index b57183e9d56c2..7e803ea2f84a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.Location; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -26,8 +25,8 @@ import java.util.Locale; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.commonType; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isNull; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -71,7 +70,7 @@ protected Expression build(Source source, List args) { * @return True if the type combination is supported by the respective function. 
*/ protected boolean supportsTypes(DataType lhsType, DataType rhsType) { - if ((lhsType == DataTypes.UNSIGNED_LONG || rhsType == DataTypes.UNSIGNED_LONG) && lhsType != rhsType) { + if ((lhsType == DataType.UNSIGNED_LONG || rhsType == DataType.UNSIGNED_LONG) && lhsType != rhsType) { // UL can only be operated on together with another UL, so skip non-UL&UL combinations return false; } @@ -80,8 +79,8 @@ protected boolean supportsTypes(DataType lhsType, DataType rhsType) { public final void testApplyToAllTypes() { // TODO replace with test cases - for (DataType lhsType : DataTypes.types()) { - for (DataType rhsType : DataTypes.types()) { + for (DataType lhsType : DataType.types()) { + for (DataType rhsType : DataType.types()) { if (supportsTypes(lhsType, rhsType) == false) { continue; } @@ -117,19 +116,19 @@ public final void testApplyToAllTypes() { } public final void testResolveType() { - for (DataType lhsType : DataTypes.types()) { + for (DataType lhsType : DataType.types()) { if (isRepresentable(lhsType) == false) { continue; } Literal lhs = randomLiteral(lhsType); - for (DataType rhsType : DataTypes.types()) { + for (DataType rhsType : DataType.types()) { if (isRepresentable(rhsType) == false) { continue; } Literal rhs = randomLiteral(rhsType); BinaryOperator op = build(new Source(Location.EMPTY, lhsType.typeName() + " " + rhsType.typeName()), lhs, rhs); - if (lhsType == DataTypes.UNSIGNED_LONG || rhsType == DataTypes.UNSIGNED_LONG) { + if (lhsType == DataType.UNSIGNED_LONG || rhsType == DataType.UNSIGNED_LONG) { validateUnsignedLongType(op, lhsType, rhsType); continue; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java index bd85fc3136467..b5bea7d858187 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.junit.After; @@ -41,8 +41,8 @@ public static Iterable parameters() { Expression expression = new Div( Source.synthetic("[1] / (long) 2"), - AbstractFunctionTestCase.field("f", DataTypes.LONG), - new Literal(Source.EMPTY, 2, DataTypes.INTEGER) + AbstractFunctionTestCase.field("f", DataType.LONG), + new Literal(Source.EMPTY, 2, DataType.INTEGER) ); for (int b = 0; b < 136; b++) { params.add(new Object[] { ByteSizeValue.ofBytes(b), expression }); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java index 0fa5b6121a6af..141fc24e73e18 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticTestCase.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.predicate.operator.AbstractBinaryOperatorTestCase; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -32,7 +31,7 @@ protected Matcher resultMatcher(List data, DataType dataType) { return equalTo(expectedValue(lhs.doubleValue(), rhs.doubleValue())); } if (lhs instanceof Long || rhs instanceof Long) { - if (dataType == DataTypes.UNSIGNED_LONG) { + if (dataType == DataType.UNSIGNED_LONG) { return equalTo(expectedUnsignedLongValue(lhs.longValue(), rhs.longValue())); } return equalTo(expectedValue(lhs.longValue(), rhs.longValue())); @@ -47,16 +46,16 @@ protected Matcher resultMatcher(List data, DataType dataType) { protected Matcher resultsMatcher(List typedData) { Number lhs = (Number) typedData.get(0).data(); Number rhs = (Number) typedData.get(1).data(); - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.DOUBLE))) { + if (typedData.stream().anyMatch(t -> t.type().equals(DataType.DOUBLE))) { return equalTo(expectedValue(lhs.doubleValue(), rhs.doubleValue())); } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.UNSIGNED_LONG))) { + if (typedData.stream().anyMatch(t -> t.type().equals(DataType.UNSIGNED_LONG))) { return equalTo(expectedUnsignedLongValue(lhs.longValue(), rhs.longValue())); } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.LONG))) { + if (typedData.stream().anyMatch(t -> t.type().equals(DataType.LONG))) { return equalTo(expectedValue(lhs.longValue(), rhs.longValue())); } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.INTEGER))) { + if (typedData.stream().anyMatch(t -> t.type().equals(DataType.INTEGER))) { return equalTo(expectedValue(lhs.intValue(), rhs.intValue())); } throw new UnsupportedOperationException(); @@ -77,7 +76,7 @@ protected boolean supportsType(DataType type) { @Override protected void validateType(BinaryOperator op, DataType lhsType, DataType rhsType) { - if (DataTypes.isNullOrNumeric(lhsType) && DataTypes.isNullOrNumeric(rhsType)) { + if (DataType.isNullOrNumeric(lhsType) && DataType.isNullOrNumeric(rhsType)) { assertTrue(op.toString(), op.typeResolved().resolved()); assertThat(op.toString(), op.dataType(), equalTo(expectedType(lhsType, rhsType))); return; @@ -102,22 +101,22 @@ protected void validateType(BinaryOperator op, DataType lhsType, Dat } protected DataType expectedType(DataType lhsType, DataType rhsType) { - if (lhsType == DataTypes.DOUBLE || rhsType == DataTypes.DOUBLE) { - return DataTypes.DOUBLE; + if (lhsType == DataType.DOUBLE || rhsType == DataType.DOUBLE) { + return DataType.DOUBLE; } - if (lhsType == DataTypes.UNSIGNED_LONG || rhsType == DataTypes.UNSIGNED_LONG) { - assertThat(lhsType, is(DataTypes.UNSIGNED_LONG)); - assertThat(rhsType, is(DataTypes.UNSIGNED_LONG)); - return DataTypes.UNSIGNED_LONG; + if (lhsType == DataType.UNSIGNED_LONG || rhsType == DataType.UNSIGNED_LONG) { + assertThat(lhsType, is(DataType.UNSIGNED_LONG)); + assertThat(rhsType, is(DataType.UNSIGNED_LONG)); + return DataType.UNSIGNED_LONG; } - if (lhsType == DataTypes.LONG || rhsType == DataTypes.LONG) { - return DataTypes.LONG; + if (lhsType == 
DataType.LONG || rhsType == DataType.LONG) { + return DataType.LONG; } - if (lhsType == DataTypes.INTEGER || rhsType == DataTypes.INTEGER) { - return DataTypes.INTEGER; + if (lhsType == DataType.INTEGER || rhsType == DataType.INTEGER) { + return DataType.INTEGER; } - if (lhsType == DataTypes.NULL || rhsType == DataTypes.NULL) { - return DataTypes.NULL; + if (lhsType == DataType.NULL || rhsType == DataType.NULL) { + return DataType.NULL; } throw new UnsupportedOperationException(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractDateTimeArithmeticTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractDateTimeArithmeticTestCase.java index 5c137c1914d7b..8a27a289bb77f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractDateTimeArithmeticTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractDateTimeArithmeticTestCase.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.hamcrest.Matcher; @@ -19,8 +18,8 @@ import java.util.List; import java.util.Locale; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.isNull; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; +import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isNullOrTemporalAmount; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount; import static org.hamcrest.Matchers.equalTo; @@ -108,7 +107,7 @@ private void assertTypeResolution(String failureMessage, BinaryOperator parameters() { "lhs", "rhs", (l, r) -> (((BigInteger) l).add((BigInteger) r)), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), List.of(), @@ -96,7 +95,7 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( (lhs, rhs) -> ((Period) lhs).plus((Period) rhs), - DataTypes.DATE_PERIOD, + DataType.DATE_PERIOD, TestCaseSupplier.datePeriodCases(), TestCaseSupplier.datePeriodCases(), startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal @@ -107,7 +106,7 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( (lhs, rhs) -> ((Duration) lhs).plus((Duration) rhs), - DataTypes.TIME_DURATION, + DataType.TIME_DURATION, TestCaseSupplier.timeDurationCases(), TestCaseSupplier.timeDurationCases(), startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal @@ -137,7 +136,7 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( result, - DataTypes.DATETIME, + DataType.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.datePeriodCases(), startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), @@ -148,7 +147,7 @@ public static Iterable parameters() { 
suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( result, - DataTypes.DATETIME, + DataType.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.timeDurationCases(), startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), @@ -159,22 +158,22 @@ public static Iterable parameters() { suppliers.addAll(TestCaseSupplier.dateCases().stream().mapMulti((tds, consumer) -> { consumer.accept( new TestCaseSupplier( - List.of(DataTypes.DATETIME, DataTypes.NULL), + List.of(DataType.DATETIME, DataType.NULL), () -> new TestCaseSupplier.TestCase( List.of(tds.get(), TestCaseSupplier.TypedData.NULL), "LiteralsEvaluator[lit=null]", - DataTypes.DATETIME, + DataType.DATETIME, nullValue() ) ) ); consumer.accept( new TestCaseSupplier( - List.of(DataTypes.NULL, DataTypes.DATETIME), + List.of(DataType.NULL, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of(TestCaseSupplier.TypedData.NULL, tds.get()), "LiteralsEvaluator[lit=null]", - DataTypes.DATETIME, + DataType.DATETIME, nullValue() ) ) @@ -198,11 +197,11 @@ public static Iterable parameters() { int lhs2 = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataType.INTEGER, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.INTEGER, "rhs") ), "AddIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.INTEGER, + DataType.INTEGER, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); @@ -210,7 +209,7 @@ public static Iterable parameters() { // exact math arithmetic exceptions suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.INTEGER, + DataType.INTEGER, () -> randomIntBetween(1, Integer.MAX_VALUE), () -> Integer.MAX_VALUE, "AddIntsEvaluator" @@ -218,7 +217,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.INTEGER, + DataType.INTEGER, () -> randomIntBetween(Integer.MIN_VALUE, -1), () -> Integer.MIN_VALUE, "AddIntsEvaluator" @@ -226,7 +225,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.LONG, + DataType.LONG, () -> randomLongBetween(1L, Long.MAX_VALUE), () -> Long.MAX_VALUE, "AddLongsEvaluator" @@ -234,7 +233,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.LONG, + DataType.LONG, () -> randomLongBetween(Long.MIN_VALUE, -1L), () -> Long.MIN_VALUE, "AddLongsEvaluator" @@ -242,7 +241,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, () -> asLongUnsigned(randomBigInteger()), () -> asLongUnsigned(UNSIGNED_LONG_MAX), "AddUnsignedLongsEvaluator" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 173ba55cefdac..a50d44822a4e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -61,7 +60,7 @@ public static Iterable parameters() { "lhs", "rhs", (lhs, rhs) -> { - if (lhs.type() != DataTypes.DOUBLE || rhs.type() != DataTypes.DOUBLE) { + if (lhs.type() != DataType.DOUBLE || rhs.type() != DataType.DOUBLE) { return List.of(); } double v = ((Double) lhs.getValue()) / ((Double) rhs.getValue()); @@ -82,7 +81,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> (((BigInteger) l).divide((BigInteger) r)), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ONE, BigInteger.valueOf(Long.MAX_VALUE), true), List.of(), @@ -113,7 +112,7 @@ public static Iterable parameters() { "DivDoublesEvaluator" ) ); - List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + List numericTypes = List.of(DataType.INTEGER, DataType.LONG, DataType.DOUBLE); for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { @@ -153,7 +152,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> null, - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.ZERO, true), List.of( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index df79ee203a3f8..ce67f6453362b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -69,7 +68,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> (((BigInteger) l).mod((BigInteger) r)), - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ONE, BigInteger.valueOf(Long.MAX_VALUE), true), List.of(), @@ -100,7 +99,7 @@ public static Iterable parameters() { "ModDoublesEvaluator" ) ); - List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + List numericTypes = List.of(DataType.INTEGER, DataType.LONG, DataType.DOUBLE); for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { @@ -140,7 +139,7 @@ 
public static Iterable parameters() { "lhs", "rhs", (l, r) -> null, - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.ZERO, true), List.of( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index faae48920f351..8b4dfa88415be 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -56,22 +56,22 @@ public static Iterable parameters() { ) ); - suppliers.add(new TestCaseSupplier("Double * Double", List.of(DataTypes.DOUBLE, DataTypes.DOUBLE), () -> { + suppliers.add(new TestCaseSupplier("Double * Double", List.of(DataType.DOUBLE, DataType.DOUBLE), () -> { double rhs = randomDouble(); double lhs = randomDouble(); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DOUBLE, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.DOUBLE, "rhs") + new TestCaseSupplier.TypedData(lhs, DataType.DOUBLE, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.DOUBLE, "rhs") ), "MulDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.DOUBLE, + DataType.DOUBLE, equalTo(lhs * rhs) ); })); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.INTEGER, + DataType.INTEGER, () -> randomBoolean() ? Integer.MIN_VALUE : Integer.MAX_VALUE, () -> randomIntBetween(2, Integer.MAX_VALUE), "MulIntsEvaluator" @@ -79,7 +79,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.LONG, + DataType.LONG, () -> randomBoolean() ? 
Long.MIN_VALUE : Long.MAX_VALUE, () -> randomLongBetween(2L, Long.MAX_VALUE), "MulLongsEvaluator" @@ -87,7 +87,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, () -> asLongUnsigned(UNSIGNED_LONG_MAX), () -> asLongUnsigned(randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE)), "MulUnsignedLongsEvaluator" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index 784599678d65b..c2a9766c23cbe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -41,7 +40,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "NegIntsEvaluator[v=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, Math::negateExact, Integer.MIN_VALUE + 1, Integer.MAX_VALUE, @@ -51,7 +50,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryInt( suppliers, "NegIntsEvaluator[v=Attribute[channel=0]]", - DataTypes.INTEGER, + DataType.INTEGER, z -> null, Integer.MIN_VALUE, Integer.MIN_VALUE, @@ -63,7 +62,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "NegLongsEvaluator[v=Attribute[channel=0]]", - DataTypes.LONG, + DataType.LONG, Math::negateExact, Long.MIN_VALUE + 1, Long.MAX_VALUE, @@ -73,7 +72,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryLong( suppliers, "NegLongsEvaluator[v=Attribute[channel=0]]", - DataTypes.LONG, + DataType.LONG, z -> null, Long.MIN_VALUE, Long.MIN_VALUE, @@ -85,7 +84,7 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryDouble( suppliers, "NegDoublesEvaluator[v=Attribute[channel=0]]", - DataTypes.DOUBLE, + DataType.DOUBLE, // TODO: Probably we don't want to allow negative zeros d -> -d, Double.NEGATIVE_INFINITY, @@ -94,20 +93,20 @@ public static Iterable parameters() { ); // TODO: Wire up edge case generation functions for these - suppliers.addAll(List.of(new TestCaseSupplier("Duration", List.of(DataTypes.TIME_DURATION), () -> { - Duration arg = (Duration) randomLiteral(DataTypes.TIME_DURATION).value(); + suppliers.addAll(List.of(new TestCaseSupplier("Duration", List.of(DataType.TIME_DURATION), () -> { + Duration arg = (Duration) randomLiteral(DataType.TIME_DURATION).value(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(arg, DataTypes.TIME_DURATION, "arg")), + List.of(new TestCaseSupplier.TypedData(arg, DataType.TIME_DURATION, "arg")), "No evaluator since this expression is only folded", - DataTypes.TIME_DURATION, + DataType.TIME_DURATION, equalTo(arg.negated()) ); - }), new TestCaseSupplier("Period", List.of(DataTypes.DATE_PERIOD), () -> { - Period arg = (Period) randomLiteral(DataTypes.DATE_PERIOD).value(); + }), new TestCaseSupplier("Period", 
List.of(DataType.DATE_PERIOD), () -> { + Period arg = (Period) randomLiteral(DataType.DATE_PERIOD).value(); return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(arg, DataTypes.DATE_PERIOD, "arg")), + List.of(new TestCaseSupplier.TypedData(arg, DataType.DATE_PERIOD, "arg")), "No evaluator since this expression is only folded", - DataTypes.DATE_PERIOD, + DataType.DATE_PERIOD, equalTo(arg.negated()) ); }))); @@ -123,7 +122,7 @@ public void testEdgeCases() { // Run the assertions for the current test cases type only to avoid running the same assertions multiple times. // TODO: These remaining cases should get rolled into generation functions for periods and durations DataType testCaseType = testCase.getData().get(0).type(); - if (testCaseType == DataTypes.DATE_PERIOD) { + if (testCaseType == DataType.DATE_PERIOD) { Period maxPeriod = Period.of(Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE); Period negatedMaxPeriod = Period.of(-Integer.MAX_VALUE, -Integer.MAX_VALUE, -Integer.MAX_VALUE); assertEquals(negatedMaxPeriod, process(maxPeriod)); @@ -135,7 +134,7 @@ public void testEdgeCases() { () -> process(minPeriod) ); assertEquals(e.getMessage(), "arithmetic exception in expression []: [integer overflow]"); - } else if (testCaseType == DataTypes.TIME_DURATION) { + } else if (testCaseType == DataType.TIME_DURATION) { Duration maxDuration = Duration.ofSeconds(Long.MAX_VALUE, 0); Duration negatedMaxDuration = Duration.ofSeconds(-Long.MAX_VALUE, 0); assertEquals(negatedMaxDuration, process(maxDuration)); @@ -167,19 +166,19 @@ private Object process(Object val) { private static DataType typeOf(Object val) { if (val instanceof Integer) { - return DataTypes.INTEGER; + return DataType.INTEGER; } if (val instanceof Long) { - return DataTypes.LONG; + return DataType.LONG; } if (val instanceof Double) { - return DataTypes.DOUBLE; + return DataType.DOUBLE; } if (val instanceof Duration) { - return DataTypes.TIME_DURATION; + return DataType.TIME_DURATION; } if (val instanceof Period) { - return DataTypes.DATE_PERIOD; + return DataType.DATE_PERIOD; } throw new UnsupportedOperationException("unsupported type [" + val.getClass() + "]"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index f3c63569d458d..e75ee9333ba54 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -73,62 +73,62 @@ public static Iterable parameters() { BigInteger rhsBI = unsignedLongAsBigInteger(rhs); return new TestCase( Source.EMPTY, - List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), + List.of(new TypedData(lhs, DataType.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataType.UNSIGNED_LONG, "rhs")), 
"SubUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", equalTo(asLongUnsigned(lhsBI.subtract(rhsBI).longValue())) ); }) */ suppliers.add(new TestCaseSupplier("Datetime - Period", () -> { - long lhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - Period rhs = (Period) randomLiteral(DataTypes.DATE_PERIOD).value(); + long lhs = (Long) randomLiteral(DataType.DATETIME).value(); + Period rhs = (Period) randomLiteral(DataType.DATE_PERIOD).value(); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.DATE_PERIOD, "rhs") + new TestCaseSupplier.TypedData(lhs, DataType.DATETIME, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.DATE_PERIOD, "rhs") ), "SubDatetimesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(asMillis(asDateTime(lhs).minus(rhs))) ); })); suppliers.add(new TestCaseSupplier("Period - Period", () -> { - Period lhs = (Period) randomLiteral(DataTypes.DATE_PERIOD).value(); - Period rhs = (Period) randomLiteral(DataTypes.DATE_PERIOD).value(); + Period lhs = (Period) randomLiteral(DataType.DATE_PERIOD).value(); + Period rhs = (Period) randomLiteral(DataType.DATE_PERIOD).value(); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATE_PERIOD, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.DATE_PERIOD, "rhs") + new TestCaseSupplier.TypedData(lhs, DataType.DATE_PERIOD, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.DATE_PERIOD, "rhs") ), "Only folding possible, so there's no evaluator", - DataTypes.DATE_PERIOD, + DataType.DATE_PERIOD, equalTo(lhs.minus(rhs)) ); })); suppliers.add(new TestCaseSupplier("Datetime - Duration", () -> { - long lhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - Duration rhs = (Duration) randomLiteral(DataTypes.TIME_DURATION).value(); + long lhs = (Long) randomLiteral(DataType.DATETIME).value(); + Duration rhs = (Duration) randomLiteral(DataType.TIME_DURATION).value(); TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.TIME_DURATION, "rhs") + new TestCaseSupplier.TypedData(lhs, DataType.DATETIME, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.TIME_DURATION, "rhs") ), "SubDatetimesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.DATETIME, + DataType.DATETIME, equalTo(asMillis(asDateTime(lhs).minus(rhs))) ); return testCase; })); suppliers.add(new TestCaseSupplier("Duration - Duration", () -> { - Duration lhs = (Duration) randomLiteral(DataTypes.TIME_DURATION).value(); - Duration rhs = (Duration) randomLiteral(DataTypes.TIME_DURATION).value(); + Duration lhs = (Duration) randomLiteral(DataType.TIME_DURATION).value(); + Duration rhs = (Duration) randomLiteral(DataType.TIME_DURATION).value(); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.TIME_DURATION, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.TIME_DURATION, "rhs") + new TestCaseSupplier.TypedData(lhs, DataType.TIME_DURATION, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.TIME_DURATION, "rhs") ), "Only folding possible, so there's no evaluator", - DataTypes.TIME_DURATION, + DataType.TIME_DURATION, equalTo(lhs.minus(rhs)) ); })); @@ -139,11 +139,11 @@ public static Iterable parameters() { int lhs2 = 
randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); return new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + new TestCaseSupplier.TypedData(List.of(lhs, lhs2), DataType.INTEGER, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataType.INTEGER, "rhs") ), "SubIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.INTEGER, + DataType.INTEGER, is(nullValue()) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); @@ -151,7 +151,7 @@ public static Iterable parameters() { // exact math arithmetic exceptions suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.INTEGER, + DataType.INTEGER, () -> Integer.MIN_VALUE, () -> randomIntBetween(1, Integer.MAX_VALUE), "SubIntsEvaluator" @@ -159,7 +159,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.INTEGER, + DataType.INTEGER, () -> randomIntBetween(Integer.MIN_VALUE, -2), () -> Integer.MAX_VALUE, "SubIntsEvaluator" @@ -167,7 +167,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.LONG, + DataType.LONG, () -> Long.MIN_VALUE, () -> randomLongBetween(1L, Long.MAX_VALUE), "SubLongsEvaluator" @@ -175,7 +175,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.LONG, + DataType.LONG, () -> randomLongBetween(Long.MIN_VALUE, -2L), () -> Long.MAX_VALUE, "SubLongsEvaluator" @@ -183,7 +183,7 @@ public static Iterable parameters() { ); suppliers.add( arithmeticExceptionOverflowCase( - DataTypes.UNSIGNED_LONG, + DataType.UNSIGNED_LONG, () -> ZERO_AS_UNSIGNED_LONG, () -> randomLongBetween(-Long.MAX_VALUE, Long.MAX_VALUE), "SubUnsignedLongsEvaluator" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 77a80a4b60ce6..3817bbe9cc74c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -69,7 +69,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), List.of(), @@ -82,7 +82,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.booleanCases(), TestCaseSupplier.booleanCases(), List.of(), @@ -95,7 
+95,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -108,7 +108,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -123,7 +123,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -136,7 +136,7 @@ public static Iterable parameters() { Object::equals, (lhsType, rhsType) -> "EqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); @@ -146,7 +146,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.geoPointCases(), TestCaseSupplier.geoPointCases(), List.of(), @@ -160,7 +160,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.geoShapeCases(), TestCaseSupplier.geoShapeCases(), List.of(), @@ -173,7 +173,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.cartesianPointCases(), TestCaseSupplier.cartesianPointCases(), List.of(), @@ -187,7 +187,7 @@ public static Iterable parameters() { "lhs", "rhs", Object::equals, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.cartesianShapeCases(), TestCaseSupplier.cartesianShapeCases(), List.of(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index 99f4b4ff82d96..f25638b482817 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -70,7 +70,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) >= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), List.of(), @@ -84,7 +84,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -98,7 +98,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, 
TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -113,7 +113,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((Number) l).longValue() >= ((Number) r).longValue(), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -126,7 +126,7 @@ public static Iterable parameters() { (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, (lhsType, rhsType) -> "GreaterThanOrEqualKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index f0fe60c458c78..0735e0dfd64f2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -70,7 +70,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) > 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), List.of(), @@ -84,7 +84,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -98,7 +98,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -113,7 +113,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((Number) l).longValue() > ((Number) r).longValue(), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -126,7 +126,7 @@ public static Iterable parameters() { (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, (lhsType, rhsType) -> "GreaterThanKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 56758d43a83e6..4a802dfcaf975 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -70,7 +70,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) <= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), List.of(), @@ -84,7 +84,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -98,7 +98,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -113,7 +113,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((Number) l).longValue() <= ((Number) r).longValue(), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -126,7 +126,7 @@ public static Iterable parameters() { (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, (lhsType, rhsType) -> "LessThanOrEqualKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index fa6f3fbd0926c..6f3f2441c6d00 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -70,7 +70,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) < 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), List.of(), @@ -84,7 +84,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, - DataTypes.BOOLEAN, + 
DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -98,7 +98,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -113,7 +113,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -126,7 +126,7 @@ public static Iterable parameters() { (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, (lhsType, rhsType) -> "LessThanKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index e03a569904dc4..174e2457eb0a5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -68,7 +68,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), List.of(), @@ -81,7 +81,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.booleanCases(), TestCaseSupplier.booleanCases(), List.of(), @@ -94,7 +94,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.ipCases(), TestCaseSupplier.ipCases(), List.of(), @@ -107,7 +107,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.versionCases(""), TestCaseSupplier.versionCases(""), List.of(), @@ -122,7 +122,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), @@ -134,7 +134,7 @@ public static Iterable parameters() { (l, r) -> false == l.equals(r), (lhsType, rhsType) -> "NotEqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", List.of(), - DataTypes.BOOLEAN + DataType.BOOLEAN ) ); suppliers.addAll( @@ -143,7 +143,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.geoPointCases(), TestCaseSupplier.geoPointCases(), List.of(), @@ -156,7 +156,7 @@ public static Iterable 
parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.geoShapeCases(), TestCaseSupplier.geoShapeCases(), List.of(), @@ -169,7 +169,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.cartesianPointCases(), TestCaseSupplier.cartesianPointCases(), List.of(), @@ -182,7 +182,7 @@ public static Iterable parameters() { "lhs", "rhs", (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, + DataType.BOOLEAN, TestCaseSupplier.cartesianShapeCases(), TestCaseSupplier.cartesianShapeCases(), List.of(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 9cb4b6cff3fc0..b45fcd2724399 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.type.KeywordEsField; @@ -74,10 +73,13 @@ import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -91,7 +93,9 @@ import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -108,6 +112,8 @@ import java.util.Set; import java.util.stream.Stream; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.SerializationTestUtils.serializeDeserialize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -131,6 +137,8 @@ public class PlanNamedTypesTests extends ESTestCase { FragmentExec.class, GrokExec.class, LimitExec.class, + LocalSourceExec.class, + HashJoinExec.class, MvExpandExec.class, OrderExec.class, ProjectExec.class, @@ -147,7 
+155,7 @@ public void testPhysicalPlanEntries() { .filter(e -> e.categoryClass().isAssignableFrom(PhysicalPlan.class)) .map(PlanNameRegistry.Entry::name) .toList(); - assertThat(actual, equalTo(expected)); + assertMap(actual, matchesList(expected)); } // List of known serializable logical plan nodes - this should be kept up to date or retrieved @@ -161,7 +169,10 @@ public void testPhysicalPlanEntries() { Eval.class, Filter.class, Grok.class, + Join.class, Limit.class, + LocalRelation.class, + Lookup.class, MvExpand.class, OrderBy.class, Project.class, @@ -177,7 +188,7 @@ public void testLogicalPlanEntries() { .map(PlanNameRegistry.Entry::name) .sorted() .toList(); - assertThat(actual, equalTo(expected)); + assertMap(actual, matchesList(expected)); } public void testFunctionEntries() { @@ -205,7 +216,7 @@ public void testWrappedStreamSimple() throws IOException { BytesStreamOutput bso = new BytesStreamOutput(); bso.writeString("hello"); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); - var plan = new RowExec(Source.EMPTY, List.of(new Alias(Source.EMPTY, "foo", field("field", DataTypes.LONG)))); + var plan = new RowExec(Source.EMPTY, List.of(new Alias(Source.EMPTY, "foo", field("field", DataType.LONG)))); out.writePhysicalPlanNode(plan); bso.writeVInt(11_345); @@ -219,7 +230,7 @@ public void testWrappedStreamSimple() throws IOException { } public void testBinComparisonSimple() throws IOException { - var orig = new Equals(Source.EMPTY, field("foo", DataTypes.DOUBLE), field("bar", DataTypes.DOUBLE)); + var orig = new Equals(Source.EMPTY, field("foo", DataType.DOUBLE), field("bar", DataType.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(EsqlBinaryComparison.class, orig); @@ -234,7 +245,7 @@ public void testBinComparison() { } public void testAggFunctionSimple() throws IOException { - var orig = new Avg(Source.EMPTY, field("foo_val", DataTypes.DOUBLE)); + var orig = new Avg(Source.EMPTY, field("foo_val", DataType.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(AggregateFunction.class, orig); @@ -247,7 +258,7 @@ public void testAggFunction() { } public void testArithmeticOperationSimple() throws IOException { - var orig = new Add(Source.EMPTY, field("foo", DataTypes.LONG), field("bar", DataTypes.LONG)); + var orig = new Add(Source.EMPTY, field("foo", DataType.LONG), field("bar", DataType.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(ArithmeticOperation.class, orig); @@ -262,7 +273,7 @@ public void testArithmeticOperation() { } public void testSubStringSimple() throws IOException { - var orig = new Substring(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, 1, DataTypes.INTEGER), null); + var orig = new Substring(Source.EMPTY, field("foo", DataType.KEYWORD), new Literal(Source.EMPTY, 1, DataType.INTEGER), null); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeSubstring(out, orig); @@ -271,7 +282,7 @@ public void testSubStringSimple() throws IOException { } public void testStartsWithSimple() throws IOException { - var orig = new StartsWith(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, "fo", DataTypes.KEYWORD)); + var orig = new 
StartsWith(Source.EMPTY, field("foo", DataType.KEYWORD), new Literal(Source.EMPTY, "fo", DataType.KEYWORD)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeStartsWith(out, orig); @@ -280,7 +291,7 @@ public void testStartsWithSimple() throws IOException { } public void testRoundSimple() throws IOException { - var orig = new Round(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); + var orig = new Round(Source.EMPTY, field("value", DataType.DOUBLE), new Literal(Source.EMPTY, 1, DataType.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeRound(out, orig); @@ -289,7 +300,7 @@ public void testRoundSimple() throws IOException { } public void testPowSimple() throws IOException { - var orig = new Pow(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); + var orig = new Pow(Source.EMPTY, field("value", DataType.DOUBLE), new Literal(Source.EMPTY, 1, DataType.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writePow(out, orig); @@ -297,19 +308,8 @@ public void testPowSimple() throws IOException { EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); } - public void testAliasSimple() throws IOException { - var orig = new Alias(Source.EMPTY, "alias_name", field("a", DataTypes.LONG)); - BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); - PlanNamedTypes.writeAlias(out, orig); - var in = planStreamInput(bso); - var deser = PlanNamedTypes.readAlias(in); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); - assertThat(deser.id(), equalTo(in.mapNameId(Long.parseLong(orig.id().toString())))); - } - public void testLiteralSimple() throws IOException { - var orig = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); + var orig = new Literal(Source.EMPTY, 1, DataType.INTEGER); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeLiteral(out, orig); @@ -318,7 +318,7 @@ public void testLiteralSimple() throws IOException { } public void testOrderSimple() throws IOException { - var orig = new Order(Source.EMPTY, field("val", DataTypes.INTEGER), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); + var orig = new Order(Source.EMPTY, field("val", DataType.INTEGER), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeOrder(out, orig); @@ -327,7 +327,7 @@ public void testOrderSimple() throws IOException { } public void testFieldSortSimple() throws IOException { - var orig = new EsQueryExec.FieldSort(field("val", DataTypes.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); + var orig = new EsQueryExec.FieldSort(field("val", DataType.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldSort(out, orig); @@ -571,7 +571,7 @@ static Map randomProperties(int depth) { return Map.copyOf(map); } - static List DATA_TYPES = 
DataTypes.types().stream().toList(); + static List<DataType> DATA_TYPES = DataType.types().stream().toList(); static DataType randomDataType() { return DATA_TYPES.get(randomIntBetween(0, DATA_TYPES.size() - 1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index a8a59f755a59f..00fb9d4943005 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xpack.esql.Column; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests; @@ -124,7 +124,7 @@ private Column randomColumn() { for (int i = 0; i < LEN; i++) { ints.appendInt(randomInt()); } - return new Column(DataTypes.INTEGER, ints.build()); + return new Column(DataType.INTEGER, ints.build()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 4674e8afc07e9..c7bec88de7082 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; @@ -151,7 +151,7 @@ public void testMissingFieldInProject() { var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); assertThat(literal.fold(), is(nullValue())); - assertThat(literal.dataType(), is(DataTypes.KEYWORD)); + assertThat(literal.dataType(), is(DataType.KEYWORD)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), EsRelation.class); @@ -206,7 +206,7 @@ public void testMissingFieldInEval() { var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); assertThat(literal.fold(), is(nullValue())); - assertThat(literal.dataType(), is(DataTypes.INTEGER)); + assertThat(literal.dataType(), is(DataType.INTEGER)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), EsRelation.class); @@ -327,7 +327,7 @@ public void testSparseDocument() throws Exception { Map<String, EsField> large = Maps.newLinkedHashMapWithExpectedSize(size); for (int i = 0; i < size; i++) { var name = String.format(Locale.ROOT, "field%03d", i); - large.put(name, new EsField(name, DataTypes.INTEGER, emptyMap(), true, false)); + large.put(name, new EsField(name, DataType.INTEGER, emptyMap(), true, false)); }
SearchStats searchStats = statsForExistingField("field000", "field001", "field002", "field003", "field004"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index d25c9cdb51398..d1a352589263a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -88,7 +88,7 @@ public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { /** * Estimated size of a keyword field in bytes. */ - private static final int KEYWORD_EST = EstimatesRowSize.estimateSize(DataTypes.KEYWORD); + private static final int KEYWORD_EST = EstimatesRowSize.estimateSize(DataType.KEYWORD); private TestPlannerOptimizer plannerOptimizer; private final EsqlConfiguration config; @@ -127,8 +127,8 @@ public void init() { List.of("a", "b"), Map.of("", "idx"), Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) + Map.entry("a", new EsField("a", DataType.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataType.LONG, Map.of(), true)) ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 4bb797faff04c..e8d1336e4b4fd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; @@ -49,7 +48,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -107,6 +105,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; @@ -118,6 +117,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -136,6 +137,8 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -150,17 +153,17 @@ import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.IP; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.BinaryComparisonOperation.EQ; import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.BinaryComparisonOperation.GT; import static 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.BinaryComparisonOperation.GTE; @@ -197,7 +200,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Map<String, EsField> mappingExtra; private static Analyzer analyzerExtra; private static EnrichResolution enrichResolution; - private static final OptimizerRules.LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new OptimizerRules.LiteralsOnTheRight(); + private static final LiteralsOnTheRight LITERALS_ON_THE_RIGHT = new LiteralsOnTheRight(); private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); @@ -996,7 +999,7 @@ public void testPushDownDissectPastProject() { var keep = as(plan, Project.class); var dissect = as(keep.child(), Dissect.class); - assertThat(dissect.extractedFields(), contains(new ReferenceAttribute(Source.EMPTY, "y", DataTypes.KEYWORD))); + assertThat(dissect.extractedFields(), contains(new ReferenceAttribute(Source.EMPTY, "y", DataType.KEYWORD))); } public void testPushDownGrokPastProject() { @@ -1009,7 +1012,7 @@ public void testPushDownGrokPastProject() { var keep = as(plan, Project.class); var grok = as(keep.child(), Grok.class); - assertThat(grok.extractedFields(), contains(new ReferenceAttribute(Source.EMPTY, "y", DataTypes.KEYWORD))); + assertThat(grok.extractedFields(), contains(new ReferenceAttribute(Source.EMPTY, "y", DataType.KEYWORD))); } public void testPushDownFilterPastProjectUsingEval() { @@ -3769,7 +3772,7 @@ public void testCountOfLiteral() { assertThat(Expressions.names(agg.aggregates()), contains("$$COUNT$s$0", "w")); var countAggLiteral = as(as(Alias.unwrap(agg.aggregates().get(0)), Count.class).field(), Literal.class); - assertTrue(countAggLiteral.semanticEquals(new Literal(EMPTY, StringUtils.WILDCARD, DataTypes.KEYWORD))); + assertTrue(countAggLiteral.semanticEquals(new Literal(EMPTY, StringUtils.WILDCARD, DataType.KEYWORD))); var exprs = eval.fields(); // s == mv_count([1,2]) * count(*) @@ -3897,7 +3900,7 @@ private record AggOfLiteralTestCase( "count_distinct({}, 1234)", c -> new ToLong( EMPTY, - new Coalesce(EMPTY, new MvCount(EMPTY, new MvDedupe(EMPTY, c)), List.of(new Literal(EMPTY, 0, DataTypes.INTEGER))) + new Coalesce(EMPTY, new MvCount(EMPTY, new MvDedupe(EMPTY, c)), List.of(new Literal(EMPTY, 0, DataType.INTEGER))) ), ints -> Arrays.stream(ints).distinct().count(), d -> 1L @@ -4969,6 +4972,158 @@ public void testIsNullDisjunction() throws Exception { assertEquals(and, new PropagateNullable().rule(and)); } + // + // Lookup + // + + /** + * Expects + * {@code + * Join[JoinConfig[type=LEFT OUTER, matchFields=[int{r}#4], conditions=[LOOKUP int_number_names ON int]]] + * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, job{f}#13, job.raw{f}#14, languages{f}#9 AS int + * , last_name{f}#10, long_noidx{f}#15, salary{f}#11]] + * | \_Limit[1000[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..]
+ * \_LocalRelation[[int{f}#16, name{f}#17],[IntVectorBlock[vector=IntArrayVector[positions=10, values=[0, 1, 2, 3, 4, 5, 6, 7, 8, + * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] + * } + */ + public void testLookupSimple() { + var plan = optimizedPlan(""" + FROM test + | RENAME languages AS int + | LOOKUP int_number_names ON int + """); + var join = as(plan, Join.class); + + // Right is the lookup table + var right = as(join.right(), LocalRelation.class); + assertMap( + right.output().stream().map(Object::toString).sorted().toList(), + matchesList().item(containsString("int{f}")).item(containsString("name{f}")) + ); + + // Left is the rest of the query + var left = as(join.left(), EsqlProject.class); + assertThat(left.output().toString(), containsString("int{r}")); + var limit = as(left.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(1000)); + + assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); + assertThat(join.config().conditions().size(), equalTo(1)); + Equals eq = as(join.config().conditions().get(0), Equals.class); + assertThat(eq.left().toString(), startsWith("int{r}")); + assertThat(eq.right().toString(), startsWith("int{r}")); + assertTrue(join.children().get(0).outputSet() + " contains " + eq.left(), join.children().get(0).outputSet().contains(eq.left())); + assertTrue(join.children().get(1).outputSet() + " contains " + eq.right(), join.children().get(1).outputSet().contains(eq.right())); + + // Join's output looks sensible too + assertMap( + join.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + // TODO prune unused columns down through the join + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + /* + * Int is a reference here because we renamed it in project. + * If we hadn't it'd be a field and that'd be fine. + */ + .item(containsString("int{r}")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + /* + * It's important that name is returned as a *reference* here + * instead of a field. If it were a field we'd use SearchStats + * on it and discover that it doesn't exist in the index. It doesn't! + * We don't expect it to. It exists only in the lookup table. + */ + .item(containsString("name{r}")) + ); + } + + /** + * Expects + * {@code + * Limit[1000[INTEGER]] + * \_Aggregate[[name{r}#20],[MIN(emp_no{f}#9) AS MIN(emp_no), name{r}#20]] + * \_Join[JoinConfig[type=LEFT OUTER, matchFields=[int{r}#4], conditions=[LOOKUP int_number_names ON int]]] + * |_EsqlProject[[_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, gender{f}#11, job{f}#16, job.raw{f}#17, languages{f}#12 AS + * int, last_name{f}#13, long_noidx{f}#18, salary{f}#14]] + * | \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] 
+ * \_LocalRelation[[int{f}#19, name{f}#20],[IntVectorBlock[vector=IntArrayVector[positions=10, values=[0, 1, 2, 3, 4, 5, 6, 7, 8, + * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] + * } + */ + public void testLookupStats() { + var plan = optimizedPlan(""" + FROM test + | RENAME languages AS int + | LOOKUP int_number_names ON int + | STATS MIN(emp_no) BY name + """); + var limit = as(plan, Limit.class); + assertThat(limit.limit().fold(), equalTo(1000)); + + var agg = as(limit.child(), Aggregate.class); + assertMap( + agg.aggregates().stream().map(Object::toString).sorted().toList(), + matchesList().item(startsWith("MIN(emp_no)")).item(startsWith("name{r}")) + ); + assertMap(agg.groupings().stream().map(Object::toString).toList(), matchesList().item(startsWith("name{r}"))); + + var join = as(agg.child(), Join.class); + // Right is the lookup table + var right = as(join.right(), LocalRelation.class); + assertMap( + right.output().stream().map(Object::toString).toList(), + matchesList().item(containsString("int{f}")).item(containsString("name{f}")) + ); + + // Left is the rest of the query + var left = as(join.left(), EsqlProject.class); + assertThat(left.output().toString(), containsString("int{r}")); + as(left.child(), EsRelation.class); + + assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); + assertThat(join.config().conditions().size(), equalTo(1)); + Equals eq = as(join.config().conditions().get(0), Equals.class); + assertThat(eq.left().toString(), startsWith("int{r}")); + assertThat(eq.right().toString(), startsWith("int{r}")); + + // Join's output looks sensible too + assertMap( + join.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + // TODO prune unused columns down through the join + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + /* + * Int is a reference here because we renamed it in project. + * If we hadn't it'd be a field and that'd be fine. + */ + .item(containsString("int{r}")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + /* + * It's important that name is returned as a *reference* here + * instead of a field. If it were a field we'd use SearchStats + * on it and discover that it doesn't exist in the index. It doesn't! + * We don't expect it to. It exists only in the lookup table. 
+ */ + .item(containsString("name{r}")) + ); + } + private Literal nullOf(DataType dataType) { return new Literal(Source.EMPTY, null, dataType); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 4b62ce1a31cb4..a829808ee040c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -9,19 +9,39 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.TestUtils; +import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull; +import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; @@ -29,24 +49,37 @@ import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.BooleanFunctionEqualsElimination; +import org.elasticsearch.xpack.esql.optimizer.rules.CombineDisjunctionsToIn; +import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; +import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import org.elasticsearch.xpack.esql.optimizer.rules.PropagateEquals; import java.util.List; import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.esql.core.TestUtils.nullEqualsOf; +import static org.elasticsearch.xpack.esql.core.TestUtils.of; import static org.elasticsearch.xpack.esql.core.TestUtils.rangeOf; import static org.elasticsearch.xpack.esql.core.TestUtils.relation; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.hamcrest.Matchers.contains; public class OptimizerRulesTests extends ESTestCase { - private static final Literal ONE = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); - private static final Literal TWO = new Literal(Source.EMPTY, 2, DataTypes.INTEGER); - private static final Literal THREE = new Literal(Source.EMPTY, 3, DataTypes.INTEGER); - private static final Literal FOUR = new Literal(Source.EMPTY, 4, DataTypes.INTEGER); - private static final Literal FIVE = new Literal(Source.EMPTY, 5, DataTypes.INTEGER); + private static final Literal ONE = new Literal(Source.EMPTY, 1, DataType.INTEGER); + private static final Literal TWO = new Literal(Source.EMPTY, 2, DataType.INTEGER); + private static final Literal THREE = new Literal(Source.EMPTY, 3, DataType.INTEGER); + private static final Literal FOUR = new Literal(Source.EMPTY, 4, DataType.INTEGER); + private static final Literal FIVE = new Literal(Source.EMPTY, 5, DataType.INTEGER); + private static final Literal SIX = new Literal(Source.EMPTY, 6, DataType.INTEGER); + private static final Expression DUMMY_EXPRESSION = + new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 0); private static Equals equalsOf(Expression left, Expression right) { return new Equals(EMPTY, left, right, null); @@ -76,6 +109,82 @@ private static FieldAttribute getFieldAttribute() { return TestUtils.getFieldAttribute("a"); } + // + // Constant folding + // + + public void testConstantFolding() { + Expression exp = new Add(EMPTY, TWO, THREE); + + assertTrue(exp.foldable()); + Expression result = new ConstantFolding().rule(exp); + assertTrue(result instanceof Literal); + assertEquals(5, ((Literal) result).value()); + + // check now with an alias + result = new ConstantFolding().rule(new Alias(EMPTY, "a", exp)); + assertEquals("a", Expressions.name(result)); + assertEquals(Alias.class, result.getClass()); + } + + public void testConstantFoldingBinaryComparison() { + assertEquals(FALSE, new ConstantFolding().rule(greaterThanOf(TWO, 
THREE)).canonical()); + assertEquals(FALSE, new ConstantFolding().rule(greaterThanOrEqualOf(TWO, THREE)).canonical()); + assertEquals(FALSE, new ConstantFolding().rule(equalsOf(TWO, THREE)).canonical()); + assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, THREE)).canonical()); + assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, NULL)).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(notEqualsOf(TWO, THREE)).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(lessThanOrEqualOf(TWO, THREE)).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(lessThanOf(TWO, THREE)).canonical()); + } + + public void testConstantFoldingBinaryLogic() { + assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, greaterThanOf(TWO, THREE), TRUE)).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, greaterThanOrEqualOf(TWO, THREE), TRUE)).canonical()); + } + + public void testConstantFoldingBinaryLogic_WithNullHandling() { + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, TRUE)).canonical().nullable()); + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, TRUE, NULL)).canonical().nullable()); + assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, NULL, FALSE)).canonical()); + assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, FALSE, NULL)).canonical()); + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, NULL)).canonical().nullable()); + + assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, TRUE)).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, TRUE, NULL)).canonical()); + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, FALSE)).canonical().nullable()); + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, FALSE, NULL)).canonical().nullable()); + assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, NULL)).canonical().nullable()); + } + + public void testConstantFoldingRange() { + assertEquals(true, new ConstantFolding().rule(rangeOf(FIVE, FIVE, true, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold()); + assertEquals(false, new ConstantFolding().rule(rangeOf(FIVE, FIVE, false, new Literal(EMPTY, 10, DataType.INTEGER), false)).fold()); + } + + public void testConstantNot() { + assertEquals(FALSE, new ConstantFolding().rule(new Not(EMPTY, TRUE))); + assertEquals(TRUE, new ConstantFolding().rule(new Not(EMPTY, FALSE))); + } + + public void testConstantFoldingLikes() { + assertEquals(TRUE, new ConstantFolding().rule(new Like(EMPTY, of("test_emp"), new LikePattern("test%", (char) 0))).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical()); + assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical()); + } + + public void testArithmeticFolding() { + assertEquals(10, foldOperator(new Add(EMPTY, new Literal(EMPTY, 7, DataType.INTEGER), THREE))); + assertEquals(4, foldOperator(new Sub(EMPTY, new Literal(EMPTY, 7, DataType.INTEGER), THREE))); + assertEquals(21, foldOperator(new Mul(EMPTY, new Literal(EMPTY, 7, DataType.INTEGER), THREE))); + assertEquals(2, foldOperator(new Div(EMPTY, new Literal(EMPTY, 7, DataType.INTEGER), THREE))); + assertEquals(1, foldOperator(new Mod(EMPTY, new Literal(EMPTY, 7, DataType.INTEGER), THREE))); + } + + private static Object 
foldOperator(BinaryOperator<?, ?, ?, ?> b) { + return ((Literal) new ConstantFolding().rule(b)).value(); + } + // // CombineDisjunction in Equals // @@ -83,7 +192,7 @@ public void testTwoEqualsWithOr() { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -94,7 +203,7 @@ public void testTwoEqualsWithSameValue() { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(Equals.class, e.getClass()); Equals eq = (Equals) e; assertEquals(fa, eq.left()); @@ -105,7 +214,7 @@ public void testOneEqualsOneIn() { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -116,7 +225,7 @@ public void testOneEqualsOneInWithSameValue() { FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -128,7 +237,7 @@ public void testSingleValueInToEquals() { Equals equals = equalsOf(fa, ONE); Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(equals, e); } @@ -137,7 +246,7 @@ public void testEqualsBehindAnd() { And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); Filter dummy = new Filter(EMPTY, relation(), and); - LogicalPlan transformed = new OptimizerRules.CombineDisjunctionsToIn().apply(dummy); + LogicalPlan transformed = new CombineDisjunctionsToIn().apply(dummy); assertSame(dummy, transformed); assertEquals(and, ((Filter) transformed).condition()); } @@ -147,7 +256,7 @@ public void testTwoEqualsDifferentFields() { FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + Expression e = new CombineDisjunctionsToIn().rule(or); assertEquals(or, e); } @@ -156,7 +265,7 @@ public void testMultipleIn() { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + Expression e = new CombineDisjunctionsToIn().rule(secondOr); assertEquals(In.class, e.getClass()); In in = (In) e; assertEquals(fa, in.value()); @@ -168,7 +277,7 @@ public void testOrWithNonCombinableExpressions() { Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); - Expression e = new 
CombineDisjunctionsToIn().rule(secondOr); assertEquals(Or.class, e.getClass()); Or or = (Or) e; assertEquals(or.left(), firstOr.right()); @@ -180,8 +289,8 @@ public void testOrWithNonCombinableExpressions() { // Test BooleanFunctionEqualsElimination public void testBoolEqualsSimplificationOnExpressions() { - OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); - Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataTypes.INTEGER), null); + BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); + Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataType.INTEGER), null); assertEquals(exp, s.rule(new Equals(EMPTY, exp, TRUE))); // TODO: Replace use of QL Not with ESQL Not @@ -189,7 +298,7 @@ public void testBoolEqualsSimplificationOnExpressions() { } public void testBoolEqualsSimplificationOnFields() { - OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + BooleanFunctionEqualsElimination s = new BooleanFunctionEqualsElimination(); FieldAttribute field = getFieldAttribute(); @@ -217,7 +326,7 @@ public void testDualEqualsConjunction() { Equals eq1 = equalsOf(fa, ONE); Equals eq2 = equalsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); assertEquals(FALSE, exp); } @@ -225,10 +334,10 @@ public void testDualEqualsConjunction() { // 1 < a < 10 AND a == 10 -> FALSE public void testEliminateRangeByEqualsOutsideInterval() { FieldAttribute fa = getFieldAttribute(); - Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataTypes.INTEGER)); - Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataType.INTEGER)); + Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataType.INTEGER), false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, r)); assertEquals(FALSE, exp); } @@ -239,7 +348,7 @@ public void testPropagateEquals_VarNeq3AndVarEq3() { NotEquals neq = notEqualsOf(fa, THREE); Equals eq = equalsOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, neq, eq)); assertEquals(FALSE, exp); } @@ -250,7 +359,7 @@ public void testPropagateEquals_VarNeq4AndVarEq3() { NotEquals neq = notEqualsOf(fa, FOUR); Equals eq = equalsOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, neq, eq)); assertEquals(Equals.class, exp.getClass()); assertEquals(eq, exp); @@ -262,7 +371,7 @@ public void testPropagateEquals_VarEq2AndVarLt2() { Equals eq = equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(FALSE, exp); } @@ -273,7 +382,7 @@ public void testPropagateEquals_VarEq2AndVarLte2() { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new 
OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(eq, exp); } @@ -284,7 +393,7 @@ public void testPropagateEquals_VarEq2AndVarLte1() { Equals eq = equalsOf(fa, TWO); LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, lt)); assertEquals(FALSE, exp); } @@ -295,7 +404,7 @@ public void testPropagateEquals_VarEq2AndVarGt2() { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -306,7 +415,7 @@ public void testPropagateEquals_VarEq2AndVarGte2() { Equals eq = equalsOf(fa, TWO); GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gte)); assertEquals(eq, exp); } @@ -317,7 +426,7 @@ public void testPropagateEquals_VarEq2AndVarLt3() { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq, gt)); assertEquals(FALSE, exp); } @@ -330,7 +439,7 @@ public void testPropagateEquals_VarEq2AndVarLt3AndVarGt1AndVarNeq4() { GreaterThan gt = greaterThanOf(fa, ONE); NotEquals neq = notEqualsOf(fa, FOUR); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, lt, gt, neq)); Expression exp = rule.rule((And) and); assertEquals(eq, exp); @@ -341,10 +450,10 @@ public void testPropagateEquals_VarEq2AndVarRangeGt1Lt3AndVarGt0AndVarNeq4() { FieldAttribute fa = getFieldAttribute(); Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, ONE, false, THREE, false); - GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataTypes.INTEGER)); + GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataType.INTEGER)); NotEquals neq = notEqualsOf(fa, FOUR); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); Expression exp = rule.rule((And) and); assertEquals(eq, exp); @@ -356,7 +465,7 @@ public void testPropagateEquals_VarEq2OrVarGt1() { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, ONE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, gt)); assertEquals(gt, exp); } @@ -367,7 +476,7 @@ public void testPropagateEquals_VarEq2OrVarGte2() { Equals eq = equalsOf(fa, TWO); GreaterThan gt = greaterThanOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, gt)); assertEquals(GreaterThanOrEqual.class, exp.getClass()); GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; @@ -380,7 +489,7 @@ public void testPropagateEquals_VarEq2OrVarLt3() { Equals eq = 
equalsOf(fa, TWO); LessThan lt = lessThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, lt)); assertEquals(lt, exp); } @@ -391,7 +500,7 @@ public void testPropagateEquals_VarEq3OrVarLt3() { Equals eq = equalsOf(fa, THREE); LessThan lt = lessThanOf(fa, THREE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, lt)); assertEquals(LessThanOrEqual.class, exp.getClass()); LessThanOrEqual lte = (LessThanOrEqual) exp; @@ -404,7 +513,7 @@ public void testPropagateEquals_VarEq2OrVarRangeGt1Lt3() { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, ONE, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(range, exp); } @@ -415,7 +524,7 @@ public void testPropagateEquals_VarEq2OrVarRangeGt2Lt3() { Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, TWO, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; @@ -431,7 +540,7 @@ public void testPropagateEquals_VarEq3OrVarRangeGt2Lt3() { Equals eq = equalsOf(fa, THREE); Range range = rangeOf(fa, TWO, false, THREE, false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, range)); assertEquals(Range.class, exp.getClass()); Range r = (Range) exp; @@ -447,7 +556,7 @@ public void testPropagateEquals_VarEq2OrVarNeq2() { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, neq)); assertEquals(TRUE, exp); } @@ -458,7 +567,7 @@ public void testPropagateEquals_VarEq2OrVarNeq5() { Equals eq = equalsOf(fa, TWO); NotEquals neq = notEqualsOf(fa, FIVE); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new Or(EMPTY, eq, neq)); assertEquals(NotEquals.class, exp.getClass()); NotEquals ne = (NotEquals) exp; @@ -473,19 +582,19 @@ public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); assertEquals(TRUE, exp); } // a == 1 AND a == 2 -> nop for date/time fields public void testPropagateEquals_ignoreDateTimeFields() { - FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); + FieldAttribute fa = TestUtils.getFieldAttribute("a", DataType.DATETIME); Equals eq1 = equalsOf(fa, ONE); Equals eq2 = equalsOf(fa, TWO); And and = new And(EMPTY, eq1, eq2); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(and); 
assertEquals(and, exp); } @@ -494,10 +603,288 @@ public void testPropagateEquals_ignoreDateTimeFields() { public void testEliminateRangeByEqualsInInterval() { FieldAttribute fa = getFieldAttribute(); Equals eq1 = equalsOf(fa, ONE); - Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataType.INTEGER), false); - OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + PropagateEquals rule = new PropagateEquals(); Expression exp = rule.rule(new And(EMPTY, eq1, r)); assertEquals(eq1, exp); } + // + // Null folding + + public void testNullFoldingIsNull() { + FoldNull foldNull = new FoldNull(); + assertEquals(true, foldNull.rule(new IsNull(EMPTY, NULL)).fold()); + assertEquals(false, foldNull.rule(new IsNull(EMPTY, TRUE)).fold()); + } + + public void testGenericNullableExpression() { + FoldNull rule = new FoldNull(); + // arithmetic + assertNullLiteral(rule.rule(new Add(EMPTY, getFieldAttribute(), NULL))); + // comparison + assertNullLiteral(rule.rule(greaterThanOf(getFieldAttribute(), NULL))); + // regex + assertNullLiteral(rule.rule(new RLike(EMPTY, NULL, new RLikePattern("123")))); + } + + public void testNullFoldingDoesNotApplyOnLogicalExpressions() { + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull rule = + new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull(); + + Or or = new Or(EMPTY, NULL, TRUE); + assertEquals(or, rule.rule(or)); + or = new Or(EMPTY, NULL, NULL); + assertEquals(or, rule.rule(or)); + + And and = new And(EMPTY, NULL, TRUE); + assertEquals(and, rule.rule(and)); + and = new And(EMPTY, NULL, NULL); + assertEquals(and, rule.rule(and)); + } + + // + // Propagate nullability (IS NULL / IS NOT NULL) + // + + // a IS NULL AND a IS NOT NULL => false + public void testIsNullAndNotNull() { + FieldAttribute fa = getFieldAttribute(); + + And and = new And(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); + assertEquals(FALSE, new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable().rule(and)); + } + + // a IS NULL AND b IS NOT NULL AND c IS NULL AND d IS NOT NULL AND e IS NULL AND a IS NOT NULL => false + public void testIsNullAndNotNullMultiField() { + FieldAttribute fa = getFieldAttribute(); + + And andOne = new And(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, getFieldAttribute())); + And andTwo = new And(EMPTY, new IsNull(EMPTY, getFieldAttribute()), new IsNotNull(EMPTY, getFieldAttribute())); + And andThree = new And(EMPTY, new IsNull(EMPTY, getFieldAttribute()), new IsNotNull(EMPTY, fa)); + + And and = new And(EMPTY, andOne, new And(EMPTY, andThree, andTwo)); + + assertEquals(FALSE, new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable().rule(and)); + } + + // a IS NULL AND a > 1 => a IS NULL AND false + public void testIsNullAndComparison() { + FieldAttribute fa = getFieldAttribute(); + IsNull isNull = new IsNull(EMPTY, fa); + + And and = new And(EMPTY, isNull, greaterThanOf(fa, ONE)); + assertEquals(new And(EMPTY, isNull, nullOf(BOOLEAN)), new PropagateNullable().rule(and)); + } + + // a IS NULL AND b < 1 AND c < 1 AND a < 1 => a IS NULL AND b < 1 AND c < 1 => a IS NULL AND b < 1 AND c < 1 + public void testIsNullAndMultipleComparison() { + FieldAttribute fa = getFieldAttribute(); + IsNull isNull = new IsNull(EMPTY, fa); + + And nestedAnd = new And( + EMPTY, + lessThanOf(TestUtils.getFieldAttribute("b"), ONE), + lessThanOf(TestUtils.getFieldAttribute("c"), ONE) 
+ ); + And and = new And(EMPTY, isNull, nestedAnd); + And top = new And(EMPTY, and, lessThanOf(fa, ONE)); + + Expression optimized = new PropagateNullable().rule(top); + Expression expected = new And(EMPTY, and, nullOf(BOOLEAN)); + assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); + } + + // ((a+1)/2) > 1 AND a + 2 AND a IS NULL AND b < 3 => NULL AND NULL AND a IS NULL AND b < 3 + public void testIsNullAndDeeplyNestedExpression() { + FieldAttribute fa = getFieldAttribute(); + IsNull isNull = new IsNull(EMPTY, fa); + + Expression nullified = new And( + EMPTY, + greaterThanOf(new Div(EMPTY, new Add(EMPTY, fa, ONE), TWO), ONE), + greaterThanOf(new Add(EMPTY, fa, TWO), ONE) + ); + Expression kept = new And(EMPTY, isNull, lessThanOf(TestUtils.getFieldAttribute("b"), THREE)); + And and = new And(EMPTY, nullified, kept); + + Expression optimized = new PropagateNullable().rule(and); + Expression expected = new And(EMPTY, new And(EMPTY, nullOf(BOOLEAN), nullOf(BOOLEAN)), kept); + + assertEquals(Predicates.splitAnd(expected), Predicates.splitAnd(optimized)); + } + + // a IS NULL OR a IS NOT NULL => no change + // a IS NULL OR a > 1 => no change + public void testIsNullInDisjunction() { + FieldAttribute fa = getFieldAttribute(); + + Or or = new Or(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); + Filter dummy = new Filter(EMPTY, relation(), or); + LogicalPlan transformed = new PropagateNullable().apply(dummy); + assertSame(dummy, transformed); + assertEquals(or, ((Filter) transformed).condition()); + + or = new Or(EMPTY, new IsNull(EMPTY, fa), greaterThanOf(fa, ONE)); + dummy = new Filter(EMPTY, relation(), or); + transformed = new PropagateNullable().apply(dummy); + assertSame(dummy, transformed); + assertEquals(or, ((Filter) transformed).condition()); + } + + // a + 1 AND (a IS NULL OR a > 3) => no change + public void testIsNullDisjunction() { + FieldAttribute fa = getFieldAttribute(); + IsNull isNull = new IsNull(EMPTY, fa); + + Or or = new Or(EMPTY, isNull, greaterThanOf(fa, THREE)); + And and = new And(EMPTY, new Add(EMPTY, fa, ONE), or); + + assertEquals(and, new PropagateNullable().rule(and)); + } + + // + // Like / Regex + // + public void testMatchAllLikeToExist() { + for (String s : asList("%", "%%", "%%%")) { + LikePattern pattern = new LikePattern(s, (char) 0); + FieldAttribute fa = getFieldAttribute(); + Like l = new Like(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(IsNotNull.class, e.getClass()); + IsNotNull inn = (IsNotNull) e; + assertEquals(fa, inn.field()); + } + } + + public void testMatchAllWildcardLikeToExist() { + for (String s : asList("*", "**", "***")) { + WildcardPattern pattern = new WildcardPattern(s); + FieldAttribute fa = getFieldAttribute(); + WildcardLike l = new WildcardLike(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(IsNotNull.class, e.getClass()); + IsNotNull inn = (IsNotNull) e; + assertEquals(fa, inn.field()); + } + } + + public void testMatchAllRLikeToExist() { + RLikePattern pattern = new RLikePattern(".*"); + FieldAttribute fa = getFieldAttribute(); + RLike l = new RLike(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(IsNotNull.class, e.getClass()); + IsNotNull inn = (IsNotNull) e; + assertEquals(fa, inn.field()); + } + + public void testExactMatchLike() { + for (String s : asList("ab", "ab0%", "ab0_c")) { + LikePattern pattern = new LikePattern(s, '0'); + FieldAttribute fa = getFieldAttribute(); + Like l 
= new Like(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals(s.replace("0", StringUtils.EMPTY), eq.right().fold()); + } + } + + public void testExactMatchWildcardLike() { + String s = "ab"; + WildcardPattern pattern = new WildcardPattern(s); + FieldAttribute fa = getFieldAttribute(); + WildcardLike l = new WildcardLike(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals(s, eq.right().fold()); + } + + public void testExactMatchRLike() { + RLikePattern pattern = new RLikePattern("abc"); + FieldAttribute fa = getFieldAttribute(); + RLike l = new RLike(EMPTY, fa, pattern); + Expression e = new ReplaceRegexMatch().rule(l); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals("abc", eq.right().fold()); + } + + private void assertNullLiteral(Expression expression) { + assertEquals(Literal.class, expression.getClass()); + assertNull(expression.fold()); + } + + private IsNotNull isNotNull(Expression field) { + return new IsNotNull(EMPTY, field); + } + + private IsNull isNull(Expression field) { + return new IsNull(EMPTY, field); + } + + private Literal nullOf(DataType dataType) { + return new Literal(Source.EMPTY, null, dataType); + } + // + // Logical simplifications + // + + public void testLiteralsOnTheRight() { + Alias a = new Alias(EMPTY, "a", new Literal(EMPTY, 10, INTEGER)); + Expression result = new LiteralsOnTheRight().rule(equalsOf(FIVE, a)); + assertTrue(result instanceof Equals); + Equals eq = (Equals) result; + assertEquals(a, eq.left()); + assertEquals(FIVE, eq.right()); + + // Note: Null Equals test removed here + } + + public void testBoolSimplifyOr() { + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = + new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + + assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, TRUE))); + assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, DUMMY_EXPRESSION))); + assertEquals(TRUE, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, TRUE))); + + assertEquals(FALSE, simplification.rule(new Or(EMPTY, FALSE, FALSE))); + assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, FALSE, DUMMY_EXPRESSION))); + assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, FALSE))); + } + + public void testBoolSimplifyAnd() { + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = + new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + + assertEquals(TRUE, simplification.rule(new And(EMPTY, TRUE, TRUE))); + assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, TRUE, DUMMY_EXPRESSION))); + assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, TRUE))); + + assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, FALSE))); + assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, DUMMY_EXPRESSION))); + assertEquals(FALSE, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, FALSE))); + } + + public void testBoolCommonFactorExtraction() { + org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = + new 
org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + + Expression a1 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); + Expression a2 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); + Expression b = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 2); + Expression c = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 3); + + Or actual = new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c)); + And expected = new And(EMPTY, a1, new Or(EMPTY, b, c)); + + assertEquals(expected, simplification.rule(actual)); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index a57754d594af9..bc70ce64944d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -47,8 +47,8 @@ import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -74,7 +74,11 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -89,6 +93,7 @@ import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -109,6 +114,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -116,6 +122,8 @@ import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; @@ -127,8 +135,8 @@ import static org.elasticsearch.xpack.esql.core.expression.Expressions.names; import static org.elasticsearch.xpack.esql.core.expression.Order.OrderDirection.ASC; import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.l; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.CARTESIAN_POINT; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; import static org.elasticsearch.xpack.esql.parser.ExpressionBuilder.MAX_EXPRESSION_DEPTH; import static org.elasticsearch.xpack.esql.parser.LogicalPlanBuilder.MAX_QUERY_DEPTH; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; @@ -136,10 +144,14 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.matchesRegex; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; // @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class PhysicalPlanOptimizerTests extends ESTestCase { @@ -149,7 +161,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { /** * Estimated size of a keyword field in bytes. 
*/ - private static final int KEYWORD_EST = EstimatesRowSize.estimateSize(DataTypes.KEYWORD); + private static final int KEYWORD_EST = EstimatesRowSize.estimateSize(DataType.KEYWORD); private EsqlParser parser; private LogicalPlanOptimizer logicalOptimizer; @@ -240,8 +252,8 @@ private static EnrichResolution setupEnrichResolution() { List.of("a", "b"), Map.of("", "idx"), Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) + Map.entry("a", new EsField("a", DataType.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataType.LONG, Map.of(), true)) ) ) ); @@ -254,10 +266,10 @@ private static EnrichResolution setupEnrichResolution() { List.of("city", "airport", "region", "city_boundary"), Map.of("", "airport_city_boundaries"), Map.ofEntries( - Map.entry("city", new EsField("city", DataTypes.KEYWORD, Map.of(), true)), - Map.entry("airport", new EsField("airport", DataTypes.TEXT, Map.of(), false)), - Map.entry("region", new EsField("region", DataTypes.TEXT, Map.of(), false)), - Map.entry("city_boundary", new EsField("city_boundary", DataTypes.GEO_SHAPE, Map.of(), false)) + Map.entry("city", new EsField("city", DataType.KEYWORD, Map.of(), true)), + Map.entry("airport", new EsField("airport", DataType.TEXT, Map.of(), false)), + Map.entry("region", new EsField("region", DataType.TEXT, Map.of(), false)), + Map.entry("city_boundary", new EsField("city_boundary", DataType.GEO_SHAPE, Map.of(), false)) ) ) ); @@ -269,7 +281,7 @@ private static EnrichResolution setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("department"), Map.of("", ".enrich-departments-1", "cluster_1", ".enrich-departments-2"), - Map.of("department", new EsField("department", DataTypes.KEYWORD, Map.of(), true)) + Map.of("department", new EsField("department", DataType.KEYWORD, Map.of(), true)) ) ); enrichResolution.addResolvedPolicy( @@ -280,7 +292,7 @@ private static EnrichResolution setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("department"), Map.of("", ".enrich-departments-3"), - Map.of("department", new EsField("department", DataTypes.KEYWORD, Map.of(), true)) + Map.of("department", new EsField("department", DataType.KEYWORD, Map.of(), true)) ) ); enrichResolution.addResolvedPolicy( @@ -291,7 +303,7 @@ private static EnrichResolution setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("department"), Map.of("cluster_1", ".enrich-departments-2"), - Map.of("department", new EsField("department", DataTypes.KEYWORD, Map.of(), true)) + Map.of("department", new EsField("department", DataType.KEYWORD, Map.of(), true)) ) ); enrichResolution.addResolvedPolicy( @@ -302,7 +314,7 @@ private static EnrichResolution setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("supervisor"), Map.of("", ".enrich-supervisors-a", "cluster_1", ".enrich-supervisors-b"), - Map.of("supervisor", new EsField("supervisor", DataTypes.KEYWORD, Map.of(), true)) + Map.of("supervisor", new EsField("supervisor", DataType.KEYWORD, Map.of(), true)) ) ); enrichResolution.addResolvedPolicy( @@ -313,7 +325,7 @@ private static EnrichResolution setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("supervisor"), Map.of("", ".enrich-supervisors-c"), - Map.of("supervisor", new EsField("supervisor", DataTypes.KEYWORD, Map.of(), true)) + Map.of("supervisor", new EsField("supervisor", DataType.KEYWORD, Map.of(), true)) ) ); enrichResolution.addResolvedPolicy( @@ -324,7 +336,7 @@ private static EnrichResolution 
setupEnrichResolution() { EnrichPolicy.MATCH_TYPE, List.of("supervisor"), Map.of("cluster_1", ".enrich-supervisors-b"), - Map.of("supervisor", new EsField("supervisor", DataTypes.KEYWORD, Map.of(), true)) + Map.of("supervisor", new EsField("supervisor", DataType.KEYWORD, Map.of(), true)) ) ); return enrichResolution; @@ -4080,6 +4092,145 @@ public void testMaxQueryDepthPlusExpressionDepth() { assertThat(e.getMessage(), containsString("ESQL statement exceeded the maximum query depth allowed (" + MAX_QUERY_DEPTH + ")")); } + public void testLookupSimple() { + PhysicalPlan plan = physicalPlan(""" + FROM test | + RENAME languages AS int | + LOOKUP int_number_names ON int"""); + var join = as(plan, HashJoinExec.class); + assertMap(join.matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); + assertMap( + join.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + .item(startsWith("int{r}")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + .item(startsWith("name{r}")) + ); + } + + /** + * Expected + * {@code + * ProjectExec[[emp_no{f}#17, int{r}#5 AS languages, name{f}#28 AS lang_name]] + * \_HashJoinExec[ + * LocalSourceExec[[int{f}#27, name{f}#28],[...]], + * [int{r}#5], + * [name{r}#28, _meta_field{f}#23, emp_no{f}#17, ...]] + * \_ProjectExec[[_meta_field{f}#23, emp_no{f}#17, ...]] + * \_TopNExec[[Order[emp_no{f}#17,ASC,LAST]],4[INTEGER],370] + * \_ExchangeExec[[],false] + * \_ProjectExec[[emp_no{f}#17, ..., languages{f}#20]] + * \_FieldExtractExec[emp_no{f}#17, _meta_field{f}#23, first_name{f}#18, ..]<[]> + * \_EsQueryExec[...] 
+ * } + */ + public void testLookupThenProject() { + PhysicalPlan plan = optimizedPlan(physicalPlan(""" + FROM employees + | SORT emp_no + | LIMIT 4 + | RENAME languages AS int + | LOOKUP int_number_names ON int + | RENAME int AS languages, name AS lang_name + | KEEP emp_no, languages, lang_name""")); + + var outerProject = as(plan, ProjectExec.class); + assertThat(outerProject.projections().toString(), containsString("AS lang_name")); + var join = as(outerProject.child(), HashJoinExec.class); + assertMap(join.matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); + assertMap( + join.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + .item(startsWith("int{r}")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + .item(startsWith("name{r}")) + ); + + var middleProject = as(join.child(), ProjectExec.class); + assertThat(middleProject.projections().stream().map(Objects::toString).toList(), not(hasItem(startsWith("name{f}")))); + /* + * At the moment we don't push projections past the HashJoin so we still include first_name here + */ + assertThat(middleProject.projections().stream().map(Objects::toString).toList(), hasItem(startsWith("first_name{f}"))); + + var outerTopn = as(middleProject.child(), TopNExec.class); + var exchange = as(outerTopn.child(), ExchangeExec.class); + var innerProject = as(exchange.child(), ProjectExec.class); + assertThat(innerProject.projections().stream().map(Objects::toString).toList(), not(hasItem(startsWith("name{f}")))); + } + + /** + * Expects optimized data node plan of + *
{@code
+     * TopN[[Order[name{r}#25,ASC,LAST], Order[emp_no{f}#14,ASC,LAST]],1000[INTEGER]]
+     * \_Join[JoinConfig[type=LEFT OUTER, unionFields=[int{r}#4]]]
+     *   |_EsqlProject[[..., long_noidx{f}#23, salary{f}#19]]
+     *   | \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..]
+     *   \_LocalRelation[[int{f}#24, name{f}#25],[...]]
+     * }
+ */ + public void testLookupThenTopN() { + var plan = physicalPlan(""" + FROM employees + | RENAME languages AS int + | LOOKUP int_number_names ON int + | RENAME name AS languages + | KEEP languages, emp_no + | SORT languages ASC, emp_no ASC + """); + + ProjectExec outerProject = as(plan, ProjectExec.class); + TopNExec outerTopN = as(outerProject.child(), TopNExec.class); + ExchangeExec exchange = as(outerTopN.child(), ExchangeExec.class); + FragmentExec frag = as(exchange.child(), FragmentExec.class); + + LogicalPlan opt = logicalOptimizer.optimize(frag.fragment()); + TopN innerTopN = as(opt, TopN.class); + assertMap( + innerTopN.order().stream().map(o -> o.child().toString()).toList(), + matchesList().item(startsWith("name{r}")).item(startsWith("emp_no{f}")) + ); + Join join = as(innerTopN.child(), Join.class); + assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertMap(join.config().matchFields().stream().map(Objects::toString).toList(), matchesList().item(startsWith("int{r}"))); + + Project innerProject = as(join.left(), Project.class); + assertThat(innerProject.projections(), hasSize(10)); + assertMap( + innerProject.projections().stream().map(Object::toString).toList(), + matchesList().item(startsWith("_meta_field{f}")) + .item(startsWith("emp_no{f}")) + .item(startsWith("first_name{f}")) + .item(startsWith("gender{f}")) + .item(startsWith("job{f}")) + .item(startsWith("job.raw{f}")) + .item(matchesRegex("languages\\{f}#\\d+ AS int#\\d+")) + .item(startsWith("last_name{f}")) + .item(startsWith("long_noidx{f}")) + .item(startsWith("salary{f}")) + ); + + LocalRelation lookup = as(join.right(), LocalRelation.class); + assertMap( + lookup.output().stream().map(Object::toString).toList(), + matchesList().item(startsWith("int{f}")).item(startsWith("name{f}")) + ); + } + @SuppressWarnings("SameParameterValue") private static void assertFilterCondition( Filter filter, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 09dae4c739115..b24d9e6083b69 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -44,12 +44,12 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DATE_PERIOD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.TIME_DURATION; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index db4d54caf9943..884b24fc0fc57 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; @@ -49,6 +48,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Explain; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.InlineStats; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; @@ -68,7 +68,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.esql.parser.ExpressionBuilder.breakIntoFragments; import static org.hamcrest.Matchers.allOf; @@ -946,6 +946,16 @@ public void testInlineConvertWithNonexistentType() { expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); } + public void testLookup() { + var plan = statement("ROW a = 1 | LOOKUP t ON j"); + var lookup = as(plan, Lookup.class); + var tableName = as(lookup.tableName(), Literal.class); + assertThat(tableName.fold(), equalTo("t")); + assertThat(lookup.matchFields(), hasSize(1)); + var matchField = as(lookup.matchFields().get(0), UnresolvedAttribute.class); + assertThat(matchField.name(), equalTo("j")); + } + public void testInlineConvertUnsupportedType() { expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); } @@ -1129,39 +1139,39 @@ private static ReferenceAttribute referenceAttribute(String name, DataType type) } private static Literal integer(int i) { - return new Literal(EMPTY, i, DataTypes.INTEGER); + return new Literal(EMPTY, i, DataType.INTEGER); } private static Literal integers(int... ints) { - return new Literal(EMPTY, Arrays.stream(ints).boxed().toList(), DataTypes.INTEGER); + return new Literal(EMPTY, Arrays.stream(ints).boxed().toList(), DataType.INTEGER); } private static Literal literalLong(long i) { - return new Literal(EMPTY, i, DataTypes.LONG); + return new Literal(EMPTY, i, DataType.LONG); } private static Literal literalLongs(long... 
longs) { - return new Literal(EMPTY, Arrays.stream(longs).boxed().toList(), DataTypes.LONG); + return new Literal(EMPTY, Arrays.stream(longs).boxed().toList(), DataType.LONG); } private static Literal literalDouble(double d) { - return new Literal(EMPTY, d, DataTypes.DOUBLE); + return new Literal(EMPTY, d, DataType.DOUBLE); } private static Literal literalDoubles(double... doubles) { - return new Literal(EMPTY, Arrays.stream(doubles).boxed().toList(), DataTypes.DOUBLE); + return new Literal(EMPTY, Arrays.stream(doubles).boxed().toList(), DataType.DOUBLE); } private static Literal literalUnsignedLong(String ulong) { - return new Literal(EMPTY, asLongUnsigned(new BigInteger(ulong)), DataTypes.UNSIGNED_LONG); + return new Literal(EMPTY, asLongUnsigned(new BigInteger(ulong)), DataType.UNSIGNED_LONG); } private static Literal literalUnsignedLongs(String... ulongs) { - return new Literal(EMPTY, Arrays.stream(ulongs).map(s -> asLongUnsigned(new BigInteger(s))).toList(), DataTypes.UNSIGNED_LONG); + return new Literal(EMPTY, Arrays.stream(ulongs).map(s -> asLongUnsigned(new BigInteger(s))).toList(), DataType.UNSIGNED_LONG); } private static Literal literalBoolean(boolean b) { - return new Literal(EMPTY, b, DataTypes.BOOLEAN); + return new Literal(EMPTY, b, DataType.BOOLEAN); } private static Literal literalBooleans(boolean... booleans) { @@ -1169,15 +1179,15 @@ private static Literal literalBooleans(boolean... booleans) { for (boolean b : booleans) { v.add(b); } - return new Literal(EMPTY, v, DataTypes.BOOLEAN); + return new Literal(EMPTY, v, DataType.BOOLEAN); } private static Literal literalString(String s) { - return new Literal(EMPTY, s, DataTypes.KEYWORD); + return new Literal(EMPTY, s, DataType.KEYWORD); } private static Literal literalStrings(String... 
strings) { - return new Literal(EMPTY, Arrays.asList(strings), DataTypes.KEYWORD); + return new Literal(EMPTY, Arrays.asList(strings), DataType.KEYWORD); } private void expectError(String query, String errorMessage) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java index 11c2d9532ff16..472e1a01e0151 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java @@ -31,7 +31,7 @@ import static org.elasticsearch.xpack.esql.core.TestUtils.of; import static org.elasticsearch.xpack.esql.core.TestUtils.relation; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.hamcrest.Matchers.contains; public class QueryPlanTests extends ESTestCase { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index d14537aede31e..fcb7d02460e94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -61,10 +60,10 @@ import java.util.Map; public class EvalMapperTests extends ESTestCase { - private static final FieldAttribute DOUBLE1 = field("foo", DataTypes.DOUBLE); - private static final FieldAttribute DOUBLE2 = field("bar", DataTypes.DOUBLE); - private static final FieldAttribute LONG = field("long", DataTypes.LONG); - private static final FieldAttribute DATE = field("date", DataTypes.DATETIME); + private static final FieldAttribute DOUBLE1 = field("foo", DataType.DOUBLE); + private static final FieldAttribute DOUBLE2 = field("bar", DataType.DOUBLE); + private static final FieldAttribute LONG = field("long", DataType.LONG); + private static final FieldAttribute DATE = field("date", DataType.DATETIME); private static final EsqlConfiguration TEST_CONFIG = new EsqlConfiguration( ZoneOffset.UTC, @@ -81,9 +80,9 @@ public class EvalMapperTests extends ESTestCase { @ParametersFactory(argumentFormatting = "%1$s") public static List params() { - Literal literal = new Literal(Source.EMPTY, new BytesRef("something"), DataTypes.KEYWORD); - Literal datePattern = new Literal(Source.EMPTY, new BytesRef("yyyy"), DataTypes.KEYWORD); - Literal dateInterval = new Literal(Source.EMPTY, Duration.ofHours(1), DataTypes.TIME_DURATION); + Literal literal = new Literal(Source.EMPTY, new BytesRef("something"), DataType.KEYWORD); + Literal datePattern = new Literal(Source.EMPTY, new BytesRef("yyyy"), DataType.KEYWORD); + Literal dateInterval = new Literal(Source.EMPTY, Duration.ofHours(1), DataType.TIME_DURATION); List params = new ArrayList<>(); for (Expression e : new Expression[] { diff 
--git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 7063853b4fce2..853096626179e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -93,9 +93,9 @@ public void testLuceneSourceOperatorHugeRowSize() throws IOException { public void testLuceneTopNSourceOperator() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); - FieldAttribute sortField = new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataTypes.INTEGER, Map.of(), true)); + FieldAttribute sortField = new FieldAttribute(Source.EMPTY, "field", new EsField("field", DataType.INTEGER, Map.of(), true)); EsQueryExec.FieldSort sort = new EsQueryExec.FieldSort(sortField, Order.OrderDirection.ASC, Order.NullsPosition.LAST); - Literal limit = new Literal(Source.EMPTY, 10, DataTypes.INTEGER); + Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, limit, List.of(sort), estimatedRowSize) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 5c96c410f27ec..b08a2798bc509 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -165,6 +165,7 @@ private class TestFieldExtractOperator implements Operator { private final MappedFieldType.FieldExtractPreference extractPreference; TestFieldExtractOperator(String columnName, DataType dataType, MappedFieldType.FieldExtractPreference extractPreference) { + assert columnNames.contains(columnName); this.columnName = columnName; this.dataType = dataType; this.extractPreference = extractPreference; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 8134dc16aedf6..e50ba59a31b2d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import 
org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression; @@ -24,13 +25,14 @@ import org.elasticsearch.xpack.esql.core.tree.NodeSubclassTests; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; @@ -84,6 +86,12 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class (NamedExpression) makeArg(NamedExpression.class)), + randomList(0, 10, () -> (Expression) makeArg(Expression.class)) + ); } return null; @@ -119,7 +127,7 @@ protected Predicate pluggableClassNameFilter() { } /** Scans the {@code .class} files to identify all classes and checks if they are subclasses of {@link Node}. */ - @ParametersFactory + @ParametersFactory(argumentFormatting = "%1s") @SuppressWarnings("rawtypes") public static List nodeSubclasses() throws IOException { return subclassesOf(Node.class, CLASSNAME_FILTER).stream() @@ -158,7 +166,7 @@ static String randomGrokPattern() { ); } - static List DATA_TYPES = DataTypes.types().stream().toList(); + static List DATA_TYPES = DataType.types().stream().toList(); static EsQueryExec.FieldSort randomFieldSort() { return new EsQueryExec.FieldSort( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java index 9eaef21421f55..7dca73219d6a1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeRegistryTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.session.EsqlIndexResolver; @@ -25,18 +24,18 @@ public class EsqlDataTypeRegistryTests extends ESTestCase { public void testCounter() { - resolve("long", TimeSeriesParams.MetricType.COUNTER, DataTypes.COUNTER_LONG); - resolve("integer", TimeSeriesParams.MetricType.COUNTER, DataTypes.COUNTER_INTEGER); - resolve("double", TimeSeriesParams.MetricType.COUNTER, DataTypes.COUNTER_DOUBLE); + resolve("long", TimeSeriesParams.MetricType.COUNTER, DataType.COUNTER_LONG); + resolve("integer", TimeSeriesParams.MetricType.COUNTER, DataType.COUNTER_INTEGER); + resolve("double", TimeSeriesParams.MetricType.COUNTER, DataType.COUNTER_DOUBLE); } public void testGauge() { - resolve("long", 
TimeSeriesParams.MetricType.GAUGE, DataTypes.LONG); + resolve("long", TimeSeriesParams.MetricType.GAUGE, DataType.LONG); } public void testLong() { - resolve("long", null, DataTypes.LONG); + resolve("long", null, DataType.LONG); } private void resolve(String esTypeName, TimeSeriesParams.MetricType metricType, DataType expected) { diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index c46d4d334cd09..3949139db033b 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -161,7 +161,7 @@ public static void createNewSingletonPolicy( final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); Request request = new Request("PUT", "_ilm/policy/" + policyName); request.setEntity(entity); - client.performRequest(request); + assertOK(client.performRequest(request)); } public static void createComposableTemplate(RestClient client, String templateName, String indexPattern, Template template) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 15a370e994583..7a420aa41ce76 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -191,6 +191,7 @@ private void createIndex(String index, String alias, boolean isTimeSeries) throw createIndexWithSettings(client(), index, alias, settings, mapping); } + @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105437") public void testRollupIndex() throws Exception { createIndex(index, alias, true); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index f1f1311196435..f4378d8ab5b7c 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -41,7 +41,7 @@ dependencies { } if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { + tasks.withType(Test).configureEach { systemProperty 'es.semantic_text_feature_flag_enabled', 'true' } } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index bb18b71eb3fea..cddcff9692a70 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -29,8 +29,8 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import java.io.IOException; import java.util.ArrayList; @@ -136,42 +136,38 @@ public void chunkedInfer( } } - private TextEmbeddingResults makeResults(List input, int dimensions) { - List embeddings = new ArrayList<>(); + private InferenceTextEmbeddingFloatResults makeResults(List input, int dimensions) { + List embeddings = new ArrayList<>(); for (int i = 0; i < input.size(); i++) { - double[] doubleEmbeddings = generateEmbedding(input.get(i), dimensions); + float[] embeddingValues = generateEmbedding(input.get(i), dimensions); List floatEmbeddings = new ArrayList<>(dimensions); for (int j = 0; j < dimensions; j++) { - floatEmbeddings.add((float) doubleEmbeddings[j]); + floatEmbeddings.add(embeddingValues[j]); } - embeddings.add(TextEmbeddingResults.Embedding.of(floatEmbeddings)); + embeddings.add(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(floatEmbeddings)); } - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } private List makeChunkedResults(List input, int dimensions) { - var results = new ArrayList(); + var chunks = new ArrayList(); for (int i = 0; i < input.size(); i++) { - double[] embeddings = generateEmbedding(input.get(i), dimensions); - results.add( - new org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults( - List.of(new ChunkedTextEmbeddingResults.EmbeddingChunk(input.get(i), embeddings)) - ) - ); + float[] embedding = generateEmbedding(input.get(i), dimensions); + chunks.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(embedding)); + } - return results; + + return InferenceChunkedTextEmbeddingFloatResults.listOf(input, new InferenceTextEmbeddingFloatResults(chunks)); } protected ServiceSettings getServiceSettingsFromMap(Map serviceSettingsMap) { return TestServiceSettings.fromMap(serviceSettingsMap); } - private static double[] generateEmbedding(String input, int dimensions) { - double[] embedding = new double[dimensions]; + private static float[] generateEmbedding(String input, int dimensions) { + float[] embedding = new float[dimensions]; for (int j = 0; j < dimensions; j++) { embedding[j] = input.hashCode() + 1 + j; } return embedding; } }
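The mock dense service above now produces float embeddings end to end, matching the renamed Inference*Float result classes it constructs. A minimal sketch of how those classes compose, assuming only the constructors and factory methods visible in this diff (the inputs and values are illustrative):

    // One float embedding per input, then a flat result, then single-chunk results.
    List<String> inputs = List.of("hello", "world");
    List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddings = new ArrayList<>();
    for (String text : inputs) {
        embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { text.hashCode(), 1.0f }));
    }
    var flat = new InferenceTextEmbeddingFloatResults(embeddings);
    // listOf(...) pairs each input string with its embedding as one chunk each.
    var chunked = InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, flat);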
diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index 05e85334cff5a..27fa55b7b7dc0 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -28,9 +28,9 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; @@ -146,7 +146,9 @@ private List makeChunkedResults(List inp tokens.add(new WeightedToken("feature_" + j, generateEmbedding(input.get(i), j))); } results.add( - new ChunkedSparseEmbeddingResults(List.of(new ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens))) + new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens)) + ) ); } return results; diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 1602aa3af5e98..300c0d2c471dc 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -54,7 +54,6 @@ protected Collection> nodePlugins() { return Arrays.asList(Utils.TestInferencePlugin.class); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109005") public void testBulkOperations() throws Exception { Map shardsSettings = Collections.singletonMap(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)); indicesAdmin().prepareCreate(INDEX_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index bff7ecdcc4a07..b3dbd97d495a9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -15,16 +15,15 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import
org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.azureaistudio.completion.AzureAiStudioChatCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.azureaistudio.completion.AzureAiStudioChatCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureaistudio.embeddings.AzureAiStudioEmbeddingsServiceSettings; @@ -304,29 +303,22 @@ private static void addChunkedInferenceResultsNamedWriteables(List { +public class TransportDeleteInferenceEndpointAction extends TransportMasterNodeAction< + DeleteInferenceEndpointAction.Request, + DeleteInferenceEndpointAction.Response> { private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; @@ -60,6 +61,7 @@ public TransportDeleteInferenceEndpointAction( actionFilters, DeleteInferenceEndpointAction.Request::new, indexNameExpressionResolver, + DeleteInferenceEndpointAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.modelRegistry = modelRegistry; @@ -71,7 +73,7 @@ protected void masterOperation( Task task, DeleteInferenceEndpointAction.Request request, ClusterState state, - ActionListener masterListener + ActionListener masterListener ) { SubscribableListener.newForked(modelConfigListener -> { // Get the model from the registry @@ -123,7 +125,9 @@ && endpointIsReferencedInPipelines(state, request.getInferenceEndpointId(), list } }) .addListener( - masterListener.delegateFailure((l3, didDeleteModel) -> masterListener.onResponse(AcknowledgedResponse.of(didDeleteModel))) + masterListener.delegateFailure( + (l3, didDeleteModel) -> masterListener.onResponse(new DeleteInferenceEndpointAction.Response(didDeleteModel, Set.of())) + ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java index 77d03ac660952..78a7522448464 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java @@ -13,9 +13,9 @@ import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import java.util.ArrayList; import java.util.List; @@ -45,7 +45,7 @@ public class EmbeddingRequestChunker { private final int chunkOverlap; private List> chunkedInputs; - private List>> results; + private List>> results; private AtomicArray errors; private ActionListener> finalListener; @@ -160,7 +160,7 @@ private class DebatchingListener implements 
ActionListener chunks, - AtomicArray> debatchedResults + AtomicArray> debatchedResults ) { - var all = new ArrayList(); + var all = new ArrayList(); for (int i = 0; i < debatchedResults.length(); i++) { var subBatch = debatchedResults.get(i); all.addAll(subBatch); @@ -224,12 +224,14 @@ private ChunkedTextEmbeddingFloatResults merge( assert chunks.size() == all.size(); - var embeddingChunks = new ArrayList(); + var embeddingChunks = new ArrayList(); for (int i = 0; i < chunks.size(); i++) { - embeddingChunks.add(new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(chunks.get(i), all.get(i).values())); + embeddingChunks.add( + new InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk(chunks.get(i), all.get(i).values()) + ); } - return new ChunkedTextEmbeddingFloatResults(embeddingChunks); + return new InferenceChunkedTextEmbeddingFloatResults(embeddingChunks); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java index 140c08ceef80f..81bc90433d34a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -26,6 +26,7 @@ public class CohereActionCreator implements CohereActionVisitor { private final ServiceComponents serviceComponents; public CohereActionCreator(Sender sender, ServiceComponents serviceComponents) { + // TODO Batching - accept a class that can handle batching this.sender = Objects.requireNonNull(sender); this.serviceComponents = Objects.requireNonNull(serviceComponents); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index 63e51d99a8cee..b4815f8f0d1bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -36,6 +36,7 @@ public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model, Thread model.getServiceSettings().getCommonSettings().uri(), "Cohere embeddings" ); + // TODO - Batching pass the batching class on to the CohereEmbeddingsRequestManager requestCreator = CohereEmbeddingsRequestManager.of(model, threadPool); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioChatCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioChatCompletionRequestManager.java index deff410aebaa8..002fa71b7fb5d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioChatCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioChatCompletionRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -37,17 +36,16 @@ public AzureAiStudioChatCompletionRequestManager(AzureAiStudioChatCompletionMode } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { AzureAiStudioChatCompletionRequest request = new AzureAiStudioChatCompletionRequest(model, input); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } private static ResponseHandler createCompletionHandler() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioEmbeddingsRequestManager.java index a2b363151a417..ec5ab2fee6a57 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureAiStudioEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -41,17 +40,16 @@ public AzureAiStudioEmbeddingsRequestManager(AzureAiStudioEmbeddingsModel model, } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); AzureAiStudioEmbeddingsRequest request = new AzureAiStudioEmbeddingsRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } private static ResponseHandler createEmbeddingsHandler() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java index 2811155f6f357..5206d6c2c23cc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -43,16 +42,15 @@ public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, Thr } @Override - public Runnable create( + public void execute( 
@Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java index 06152b50822aa..e0fcee30e5af3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -55,16 +54,15 @@ public AzureOpenAiEmbeddingsRequestManager(AzureOpenAiEmbeddingsModel model, Tru } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); AzureOpenAiEmbeddingsRequest request = new AzureOpenAiEmbeddingsRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java index abca0ce0d049b..a015716b81032 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java @@ -38,11 +38,16 @@ public String inferenceEntityId() { @Override public Object rateLimitGrouping() { - return rateLimitGroup; + // Two inference endpoints can have the same information defining the group but different rate limits. + // In that case they should be in different groups; otherwise whichever endpoint initially created the group + // would set the rate, and the other endpoint's rate limit would be ignored. + return new EndpointGrouping(rateLimitGroup, rateLimitSettings); } @Override public RateLimitSettings rateLimitSettings() { return rateLimitSettings; } + + private record EndpointGrouping(Object group, RateLimitSettings settings) {} }
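The EndpointGrouping record above folds the rate limit settings into the grouping key itself. A minimal sketch of the effect, relying only on record value semantics (the stand-in classes and values below are illustrative, not the plugin's real ones):

    public class EndpointGroupingSketch {
        // Stand-ins with record-derived equals/hashCode, mirroring the diff.
        record RateLimitSettings(long requestsPerTimeUnit) {}
        record EndpointGrouping(Object group, RateLimitSettings settings) {}

        public static void main(String[] args) {
            Object sameGroupInfo = "same-service-same-endpoint";
            var slow = new EndpointGrouping(sameGroupInfo, new RateLimitSettings(100));
            var fast = new EndpointGrouping(sameGroupInfo, new RateLimitSettings(10_000));
            // Identical group information but different settings: the keys differ,
            // so each endpoint gets its own rate limiter instead of inheriting
            // whichever rate was registered first.
            System.out.println(slow.equals(fast)); // false
        }
    }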
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java index 255d4a3f3879f..8a4b0e45b93fa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -46,16 +45,15 @@ private CohereCompletionRequestManager(CohereCompletionModel model, ThreadPool t } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { CohereCompletionRequest request = new CohereCompletionRequest(input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java index 0bf1c11285adb..a51910f1d0a67 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -44,16 +43,15 @@ private CohereEmbeddingsRequestManager(CohereEmbeddingsModel model, ThreadPool t } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { CohereEmbeddingsRequest request = new CohereEmbeddingsRequest(input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java index 1778663a194e8..1351eec406569 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import
org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -44,16 +43,15 @@ private CohereRerankRequestManager(CohereRerankModel model, ThreadPool threadPoo } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { CohereRerankRequest request = new CohereRerankRequest(query, input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableInferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableInferenceRequest.java index 53f30773cbfe3..214eba4ee3485 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableInferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableInferenceRequest.java @@ -23,7 +23,6 @@ record ExecutableInferenceRequest( RequestSender requestSender, Logger logger, Request request, - HttpClientContext context, ResponseHandler responseHandler, Supplier hasFinished, ActionListener listener @@ -34,7 +33,7 @@ public void run() { var inferenceEntityId = request.createHttpRequest().inferenceEntityId(); try { - requestSender.send(logger, request, context, hasFinished, responseHandler, listener); + requestSender.send(logger, request, HttpClientContext.create(), hasFinished, responseHandler, listener); } catch (Exception e) { var errorMessage = Strings.format("Failed to send request from inference entity id [%s]", inferenceEntityId); logger.warn(errorMessage, e); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java index eb9baa680446a..2b191b046477b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioCompletionRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -42,15 +41,14 @@ public GoogleAiStudioCompletionRequestManager(GoogleAiStudioCompletionModel mode } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { GoogleAiStudioCompletionRequest request = new GoogleAiStudioCompletionRequest(input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new 
ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioEmbeddingsRequestManager.java index 15c2825e7d043..6436e0231ab48 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleAiStudioEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -48,17 +47,16 @@ public GoogleAiStudioEmbeddingsRequestManager(GoogleAiStudioEmbeddingsModel mode } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); GoogleAiStudioEmbeddingsRequest request = new GoogleAiStudioEmbeddingsRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 21a758a3db248..d1e309a774ab7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -15,6 +15,8 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.RequestExecutor; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -39,30 +41,28 @@ public static class Factory { private final ServiceComponents serviceComponents; private final HttpClientManager httpClientManager; private final ClusterService clusterService; - private final SingleRequestManager requestManager; + private final RequestSender requestSender; public Factory(ServiceComponents serviceComponents, HttpClientManager httpClientManager, ClusterService clusterService) { this.serviceComponents = Objects.requireNonNull(serviceComponents); this.httpClientManager = Objects.requireNonNull(httpClientManager); this.clusterService = Objects.requireNonNull(clusterService); - var requestSender = new RetryingHttpSender( + requestSender = 
new RetryingHttpSender( this.httpClientManager.getHttpClient(), serviceComponents.throttlerManager(), new RetrySettings(serviceComponents.settings(), clusterService), serviceComponents.threadPool() ); - requestManager = new SingleRequestManager(requestSender); } - public Sender createSender(String serviceName) { + public Sender createSender() { return new HttpRequestSender( - serviceName, serviceComponents.threadPool(), httpClientManager, clusterService, serviceComponents.settings(), - requestManager + requestSender ); } } @@ -71,26 +71,24 @@ public Sender createSender(String serviceName) { private final ThreadPool threadPool; private final HttpClientManager manager; - private final RequestExecutorService service; + private final RequestExecutor service; private final AtomicBoolean started = new AtomicBoolean(false); private final CountDownLatch startCompleted = new CountDownLatch(1); private HttpRequestSender( - String serviceName, ThreadPool threadPool, HttpClientManager httpClientManager, ClusterService clusterService, Settings settings, - SingleRequestManager requestManager + RequestSender requestSender ) { this.threadPool = Objects.requireNonNull(threadPool); this.manager = Objects.requireNonNull(httpClientManager); service = new RequestExecutorService( - serviceName, threadPool, startCompleted, new RequestExecutorServiceSettings(settings, clusterService), - requestManager + requestSender ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java index 7c09e0c67c1c6..6c8fc446d5243 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -55,26 +54,17 @@ private HuggingFaceRequestManager(HuggingFaceModel model, ResponseHandler respon } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = truncate(input, model.getTokenLimit()); var request = new HuggingFaceInferenceRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest( - requestSender, - logger, - request, - context, - responseHandler, - hasRequestCompletedFunction, - listener - ); + execute(new ExecutableInferenceRequest(requestSender, logger, request, responseHandler, hasRequestCompletedFunction, listener)); } record RateLimitGrouping(int accountHash) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java index 3c711bb79717c..6199a75a41a7d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java @@ -19,9 +19,9 @@ public interface InferenceRequest { /** - * Returns the creator that handles building an executable request based on the input provided. + * Returns the manager that handles building and executing an inference request. */ - RequestManager getRequestCreator(); + RequestManager getRequestManager(); /** * Returns the query associated with this request. Used for Rerank tasks. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/MistralEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/MistralEmbeddingsRequestManager.java index f31a633581705..ab6a1bfb31372 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/MistralEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/MistralEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -51,18 +50,17 @@ public MistralEmbeddingsRequestManager(MistralEmbeddingsModel model, Truncator t } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); MistralEmbeddingsRequest request = new MistralEmbeddingsRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } record RateLimitGrouping(int keyHashCode) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java deleted file mode 100644 index 0355880b3f714..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.http.sender; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.inference.InferenceServiceResults; - -import java.util.List; -import java.util.function.Supplier; - -class NoopTask implements RejectableTask { - - @Override - public RequestManager getRequestCreator() { - return null; - } - - @Override - public String getQuery() { - return null; - } - - @Override - public List getInput() { - return null; - } - - @Override - public ActionListener getListener() { - return null; - } - - @Override - public boolean hasCompleted() { - return true; - } - - @Override - public Supplier getRequestCompletedFunction() { - return () -> true; - } - - @Override - public void onRejection(Exception e) { - - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java index 9c6c216c61272..7bc09fd76736b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -43,17 +42,16 @@ private OpenAiCompletionRequestManager(OpenAiChatCompletionModel model, ThreadPo } @Override - public Runnable create( + public void execute( @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { OpenAiChatCompletionRequest request = new OpenAiChatCompletionRequest(input, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } private static ResponseHandler createCompletionHandler() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java index 3a0a8fd64a656..41f91d2b89ee5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -55,17 +54,16 @@ private OpenAiEmbeddingsRequestManager(OpenAiEmbeddingsModel model, Truncator tr } @Override - public Runnable create( + public void execute( String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ) { var truncatedInput = 
truncate(input, model.getServiceSettings().maxInputTokens()); OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, truncatedInput, model); - return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index d5a13c2e0771d..38d47aec68eb6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -17,21 +16,31 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueue; +import org.elasticsearch.xpack.inference.common.RateLimiter; import org.elasticsearch.xpack.inference.external.http.RequestExecutor; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import java.time.Clock; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; /** * A service for queuing and executing {@link RequestTask}. This class is useful because the @@ -45,7 +54,18 @@ * {@link org.apache.http.client.config.RequestConfig.Builder#setConnectionRequestTimeout} for more info. 
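The reworked RequestExecutorService below no longer keeps a single shared queue; its execute() method routes each task to a per-group handler created lazily with ConcurrentHashMap.computeIfAbsent. A stripped-down sketch of that routing pattern, with a hypothetical HandlerSketch standing in for RateLimitingEndpointHandler:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    class RoutingSketch {
        // Hypothetical per-group handler; the real class is RateLimitingEndpointHandler.
        record HandlerSketch(String id) {
            void enqueue(String task) {
                System.out.println("group " + id + " enqueued " + task);
            }
        }

        private final ConcurrentMap<Object, HandlerSketch> rateLimitGroupings = new ConcurrentHashMap<>();

        void execute(Object rateLimitGrouping, String task) {
            // computeIfAbsent guarantees exactly one handler per key, even with concurrent callers.
            var handler = rateLimitGroupings.computeIfAbsent(
                rateLimitGrouping,
                key -> new HandlerSketch(Integer.toString(key.hashCode()))
            );
            handler.enqueue(task);
        }
    }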
*/ class RequestExecutorService implements RequestExecutor { - private static final AdjustableCapacityBlockingQueue.QueueCreator QUEUE_CREATOR = + + /** + * Provides dependency injection mainly for testing + */ + interface Sleeper { + void sleep(TimeValue sleepTime) throws InterruptedException; + } + + // default for tests + static final Sleeper DEFAULT_SLEEPER = sleepTime -> sleepTime.timeUnit().sleep(sleepTime.duration()); + // default for tests + static final AdjustableCapacityBlockingQueue.QueueCreator DEFAULT_QUEUE_CREATOR = new AdjustableCapacityBlockingQueue.QueueCreator<>() { @Override public BlockingQueue create(int capacity) { @@ -65,86 +85,116 @@ public BlockingQueue create() { } }; + /** + * Provides dependency injection mainly for testing + */ + interface RateLimiterCreator { + RateLimiter create(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit); + } + + // default for testing + static final RateLimiterCreator DEFAULT_RATE_LIMIT_CREATOR = RateLimiter::new; private static final Logger logger = LogManager.getLogger(RequestExecutorService.class); - private final String serviceName; - private final AdjustableCapacityBlockingQueue queue; - private final AtomicBoolean running = new AtomicBoolean(true); - private final CountDownLatch terminationLatch = new CountDownLatch(1); - private final HttpClientContext httpContext; + private static final TimeValue RATE_LIMIT_GROUP_CLEANUP_INTERVAL = TimeValue.timeValueDays(1); + + private final ConcurrentMap rateLimitGroupings = new ConcurrentHashMap<>(); private final ThreadPool threadPool; private final CountDownLatch startupLatch; - private final BlockingQueue controlQueue = new LinkedBlockingQueue<>(); - private final SingleRequestManager requestManager; + private final CountDownLatch terminationLatch = new CountDownLatch(1); + private final RequestSender requestSender; + private final RequestExecutorServiceSettings settings; + private final Clock clock; + private final AtomicBoolean shutdown = new AtomicBoolean(false); + private final AdjustableCapacityBlockingQueue.QueueCreator queueCreator; + private final Sleeper sleeper; + private final RateLimiterCreator rateLimiterCreator; + private final AtomicReference cancellableCleanupTask = new AtomicReference<>(); + private final AtomicBoolean started = new AtomicBoolean(false); RequestExecutorService( - String serviceName, ThreadPool threadPool, @Nullable CountDownLatch startupLatch, RequestExecutorServiceSettings settings, - SingleRequestManager requestManager + RequestSender requestSender ) { - this(serviceName, threadPool, QUEUE_CREATOR, startupLatch, settings, requestManager); + this( + threadPool, + DEFAULT_QUEUE_CREATOR, + startupLatch, + settings, + requestSender, + Clock.systemUTC(), + DEFAULT_SLEEPER, + DEFAULT_RATE_LIMIT_CREATOR + ); } - /** - * This constructor should only be used directly for testing. 
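DEFAULT_RATE_LIMIT_CREATOR above binds to the x-pack RateLimiter, and the handler further down relies on its two-step contract: ask how long until a token is available (timeToReserve), and execute only when the answer is zero (then reserve). A self-contained token-bucket sketch of that contract, not the actual RateLimiter implementation:

    import java.util.concurrent.TimeUnit;

    class TokenBucketSketch {
        private final double tokensPerNano;
        private final double maxTokens;
        private double tokens;
        private long lastRefill = System.nanoTime();

        TokenBucketSketch(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit) {
            this.maxTokens = accumulatedTokensLimit;
            this.tokensPerNano = tokensPerTimeUnit / unit.toNanos(1);
            this.tokens = accumulatedTokensLimit;
        }

        private void refill() {
            long now = System.nanoTime();
            tokens = Math.min(maxTokens, tokens + (now - lastRefill) * tokensPerNano);
            lastRefill = now;
        }

        // Nanoseconds until n tokens could be reserved; zero means the task can run now.
        synchronized long timeToReserveNanos(int n) {
            refill();
            double deficit = n - tokens;
            return deficit <= 0 ? 0L : (long) (deficit / tokensPerNano);
        }

        // Deduct the tokens; callers check timeToReserveNanos(n) == 0 first, which is
        // what the assert in executeEnqueuedTaskInternal() below also expects.
        synchronized void reserve(int n) {
            refill();
            tokens -= n;
        }
    }

handleTasks() then sleeps for the smallest wait any group reports, never longer than the task poll frequency setting, before polling the groups again.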
- */ RequestExecutorService( - String serviceName, ThreadPool threadPool, - AdjustableCapacityBlockingQueue.QueueCreator createQueue, + AdjustableCapacityBlockingQueue.QueueCreator queueCreator, @Nullable CountDownLatch startupLatch, RequestExecutorServiceSettings settings, - SingleRequestManager requestManager + RequestSender requestSender, + Clock clock, + Sleeper sleeper, + RateLimiterCreator rateLimiterCreator ) { - this.serviceName = Objects.requireNonNull(serviceName); this.threadPool = Objects.requireNonNull(threadPool); - this.httpContext = HttpClientContext.create(); - this.queue = new AdjustableCapacityBlockingQueue<>(createQueue, settings.getQueueCapacity()); + this.queueCreator = Objects.requireNonNull(queueCreator); this.startupLatch = startupLatch; - this.requestManager = Objects.requireNonNull(requestManager); + this.requestSender = Objects.requireNonNull(requestSender); + this.settings = Objects.requireNonNull(settings); + this.clock = Objects.requireNonNull(clock); + this.sleeper = Objects.requireNonNull(sleeper); + this.rateLimiterCreator = Objects.requireNonNull(rateLimiterCreator); + } - Objects.requireNonNull(settings); - settings.registerQueueCapacityCallback(this::onCapacityChange); + public void shutdown() { + if (shutdown.compareAndSet(false, true)) { + if (cancellableCleanupTask.get() != null) { + logger.debug(() -> "Stopping clean up thread"); + cancellableCleanupTask.get().cancel(); + } + } } - private void onCapacityChange(int capacity) { - logger.debug(() -> Strings.format("Setting queue capacity to [%s]", capacity)); + public boolean isShutdown() { + return shutdown.get(); + } - var enqueuedCapacityCommand = controlQueue.offer(() -> updateCapacity(capacity)); - if (enqueuedCapacityCommand == false) { - logger.warn("Failed to change request batching service queue capacity. Control queue was full, please try again later."); - } else { - // ensure that the task execution loop wakes up - queue.offer(new NoopTask()); - } + public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { + return terminationLatch.await(timeout, unit); } - private void updateCapacity(int newCapacity) { - try { - queue.setCapacity(newCapacity); - } catch (Exception e) { - logger.warn( - format("Failed to set the capacity of the task queue to [%s] for request batching service [%s]", newCapacity, serviceName), - e - ); - } + public boolean isTerminated() { + return terminationLatch.getCount() == 0; + } + + public int queueSize() { + return rateLimitGroupings.values().stream().mapToInt(RateLimitingEndpointHandler::queueSize).sum(); } /** * Begin servicing tasks. + *
+ * <p>
+ * Note: This should only be called once for the life of the object.
+ * </p>
*/ public void start() { try { + assert started.get() == false : "start() can only be called once"; + started.set(true); + + startCleanupTask(); signalStartInitiated(); - while (running.get()) { + while (isShutdown() == false) { handleTasks(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } finally { - running.set(false); + shutdown(); notifyRequestsOfShutdown(); terminationLatch.countDown(); } @@ -156,108 +206,68 @@ private void signalStartInitiated() { } } - /** - * Protects the task retrieval logic from an unexpected exception. - * - * @throws InterruptedException rethrows the exception if it occurred retrieving a task because the thread is likely attempting to - * shut down - */ - private void handleTasks() throws InterruptedException { - try { - RejectableTask task = queue.take(); + private void startCleanupTask() { + assert cancellableCleanupTask.get() == null : "The clean up task can only be set once"; + cancellableCleanupTask.set(startCleanupThread(RATE_LIMIT_GROUP_CLEANUP_INTERVAL)); + } - var command = controlQueue.poll(); - if (command != null) { - command.run(); - } + private Scheduler.Cancellable startCleanupThread(TimeValue interval) { + logger.debug(() -> Strings.format("Clean up task scheduled with interval [%s]", interval)); - // TODO add logic to complete pending items in the queue before shutting down - if (running.get() == false) { - logger.debug(() -> format("Http executor service [%s] exiting", serviceName)); - rejectTaskBecauseOfShutdown(task); - } else { - executeTask(task); - } - } catch (InterruptedException e) { - throw e; - } catch (Exception e) { - logger.warn(format("Http executor service [%s] failed while retrieving task for execution", serviceName), e); - } + return threadPool.scheduleWithFixedDelay(this::removeStaleGroupings, interval, threadPool.executor(UTILITY_THREAD_POOL_NAME)); } - private void executeTask(RejectableTask task) { - try { - requestManager.execute(task, httpContext); - } catch (Exception e) { - logger.warn(format("Http executor service [%s] failed to execute request [%s]", serviceName, task), e); + // default for testing + void removeStaleGroupings() { + var now = Instant.now(clock); + for (var iter = rateLimitGroupings.values().iterator(); iter.hasNext();) { + var endpoint = iter.next(); + + // if the current time is after the last time the endpoint enqueued a request + allowed stale period then we'll remove it + if (now.isAfter(endpoint.timeOfLastEnqueue().plus(settings.getRateLimitGroupStaleDuration()))) { + endpoint.close(); + iter.remove(); + } } } - private synchronized void notifyRequestsOfShutdown() { - assert isShutdown() : "Requests should only be notified if the executor is shutting down"; - - try { - List notExecuted = new ArrayList<>(); - queue.drainTo(notExecuted); - - rejectTasks(notExecuted, this::rejectTaskBecauseOfShutdown); - } catch (Exception e) { - logger.warn(format("Failed to notify tasks of queuing service [%s] shutdown", serviceName)); + private void handleTasks() throws InterruptedException { + var timeToWait = settings.getTaskPollFrequency(); + for (var endpoint : rateLimitGroupings.values()) { + timeToWait = TimeValue.min(endpoint.executeEnqueuedTask(), timeToWait); } - } - private void rejectTaskBecauseOfShutdown(RejectableTask task) { - try { - task.onRejection( - new EsRejectedExecutionException( - format("Failed to send request, queue service [%s] has shutdown prior to executing request", serviceName), - true - ) - ); - } catch (Exception e) { - logger.warn( - format("Failed to 
notify request [%s] for service [%s] of rejection after queuing service shutdown", task, serviceName) - ); - } + sleeper.sleep(timeToWait); } - private void rejectTasks(List tasks, Consumer rejectionFunction) { - for (var task : tasks) { - rejectionFunction.accept(task); + private void notifyRequestsOfShutdown() { + assert isShutdown() : "Requests should only be notified if the executor is shutting down"; + + for (var endpoint : rateLimitGroupings.values()) { + endpoint.notifyRequestsOfShutdown(); } } - public int queueSize() { - return queue.size(); - } + // default for testing + Integer remainingQueueCapacity(RequestManager requestManager) { + var endpoint = rateLimitGroupings.get(requestManager.rateLimitGrouping()); - @Override - public void shutdown() { - if (running.compareAndSet(true, false)) { - // if this fails because the queue is full, that's ok, we just want to ensure that queue.take() returns - queue.offer(new NoopTask()); + if (endpoint == null) { + return null; } - } - @Override - public boolean isShutdown() { - return running.get() == false; - } - - @Override - public boolean isTerminated() { - return terminationLatch.getCount() == 0; + return endpoint.remainingCapacity(); } - @Override - public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { - return terminationLatch.await(timeout, unit); + // default for testing + int numberOfRateLimitGroups() { + return rateLimitGroupings.size(); } /** * Execute the request at some point in the future. * - * @param requestCreator the http request to send + * @param requestManager the http request to send * @param inferenceInputs the inputs to send in the request * @param timeout the maximum time to wait for this request to complete (failing or succeeding). Once the time elapses, the * listener::onFailure is called with a {@link org.elasticsearch.ElasticsearchTimeoutException}. @@ -265,13 +275,13 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE * @param listener an {@link ActionListener} for the response or failure */ public void execute( - RequestManager requestCreator, + RequestManager requestManager, InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ) { var task = new RequestTask( - requestCreator, + requestManager, inferenceInputs, timeout, threadPool, @@ -280,38 +290,230 @@ public void execute( ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext()) ); - completeExecution(task); + var endpoint = rateLimitGroupings.computeIfAbsent(requestManager.rateLimitGrouping(), key -> { + var endpointHandler = new RateLimitingEndpointHandler( + Integer.toString(requestManager.rateLimitGrouping().hashCode()), + queueCreator, + settings, + requestSender, + clock, + requestManager.rateLimitSettings(), + this::isShutdown, + rateLimiterCreator + ); + + endpointHandler.init(); + return endpointHandler; + }); + + endpoint.enqueue(task); } - private void completeExecution(RequestTask task) { - if (isShutdown()) { - EsRejectedExecutionException rejected = new EsRejectedExecutionException( - format("Failed to enqueue task because the http executor service [%s] has already shutdown", serviceName), - true + /** + * Provides rate limiting functionality for requests. A single {@link RateLimitingEndpointHandler} governs a group of requests. + * This allows many requests to be serialized if they are being sent too fast. If the rate limit has not been met they will be sent + * as soon as a thread is available. 
+ */ + private static class RateLimitingEndpointHandler { + + private static final TimeValue NO_TASKS_AVAILABLE = TimeValue.MAX_VALUE; + private static final TimeValue EXECUTED_A_TASK = TimeValue.ZERO; + private static final Logger logger = LogManager.getLogger(RateLimitingEndpointHandler.class); + private static final int ACCUMULATED_TOKENS_LIMIT = 1; + + private final AdjustableCapacityBlockingQueue queue; + private final Supplier isShutdownMethod; + private final RequestSender requestSender; + private final String id; + private final AtomicReference timeOfLastEnqueue = new AtomicReference<>(); + private final Clock clock; + private final RateLimiter rateLimiter; + private final RequestExecutorServiceSettings requestExecutorServiceSettings; + + RateLimitingEndpointHandler( + String id, + AdjustableCapacityBlockingQueue.QueueCreator createQueue, + RequestExecutorServiceSettings settings, + RequestSender requestSender, + Clock clock, + RateLimitSettings rateLimitSettings, + Supplier isShutdownMethod, + RateLimiterCreator rateLimiterCreator + ) { + this.requestExecutorServiceSettings = Objects.requireNonNull(settings); + this.id = Objects.requireNonNull(id); + this.queue = new AdjustableCapacityBlockingQueue<>(createQueue, settings.getQueueCapacity()); + this.requestSender = Objects.requireNonNull(requestSender); + this.clock = Objects.requireNonNull(clock); + this.isShutdownMethod = Objects.requireNonNull(isShutdownMethod); + + Objects.requireNonNull(rateLimitSettings); + Objects.requireNonNull(rateLimiterCreator); + rateLimiter = rateLimiterCreator.create( + ACCUMULATED_TOKENS_LIMIT, + rateLimitSettings.requestsPerTimeUnit(), + rateLimitSettings.timeUnit() ); - task.onRejection(rejected); - return; } - boolean added = queue.offer(task); - if (added == false) { - EsRejectedExecutionException rejected = new EsRejectedExecutionException( - format("Failed to execute task because the http executor service [%s] queue is full", serviceName), - false - ); + public void init() { + requestExecutorServiceSettings.registerQueueCapacityCallback(id, this::onCapacityChange); + } - task.onRejection(rejected); - } else if (isShutdown()) { - // It is possible that a shutdown and notification request occurred after we initially checked for shutdown above - // If the task was added after the queue was already drained it could sit there indefinitely. 
So let's check again if - // we shut down and if so we'll redo the notification - notifyRequestsOfShutdown(); + private void onCapacityChange(int capacity) { + logger.debug(() -> Strings.format("Executor service grouping [%s] setting queue capacity to [%s]", id, capacity)); + + try { + queue.setCapacity(capacity); + } catch (Exception e) { + logger.warn(format("Executor service grouping [%s] failed to set the capacity of the task queue to [%s]", id, capacity), e); + } } - } - // default for testing - int remainingQueueCapacity() { - return queue.remainingCapacity(); + public int queueSize() { + return queue.size(); + } + + public boolean isShutdown() { + return isShutdownMethod.get(); + } + + public Instant timeOfLastEnqueue() { + return timeOfLastEnqueue.get(); + } + + public synchronized TimeValue executeEnqueuedTask() { + try { + return executeEnqueuedTaskInternal(); + } catch (Exception e) { + logger.warn(format("Executor service grouping [%s] failed to execute request", id), e); + // we tried to do some work but failed, so we'll say we did something to try looking for more work + return EXECUTED_A_TASK; + } + } + + private TimeValue executeEnqueuedTaskInternal() { + var timeBeforeAvailableToken = rateLimiter.timeToReserve(1); + if (shouldExecuteImmediately(timeBeforeAvailableToken) == false) { + return timeBeforeAvailableToken; + } + + var task = queue.poll(); + + // TODO Batching - in a situation where no new tasks are queued we'll want to execute any prepared tasks + // So we'll need to check for null and call a helper method executePreparedTasks() + + if (shouldExecuteTask(task) == false) { + return NO_TASKS_AVAILABLE; + } + + // We should never have to wait because we checked above + var reserveRes = rateLimiter.reserve(1); + assert shouldExecuteImmediately(reserveRes) : "Reserving request tokens required a sleep when it should not have"; + + task.getRequestManager() + .execute(task.getQuery(), task.getInput(), requestSender, task.getRequestCompletedFunction(), task.getListener()); + return EXECUTED_A_TASK; + } + + private static boolean shouldExecuteTask(RejectableTask task) { + return task != null && isNoopRequest(task) == false && task.hasCompleted() == false; + } + + private static boolean isNoopRequest(InferenceRequest inferenceRequest) { + return inferenceRequest.getRequestManager() == null + || inferenceRequest.getInput() == null + || inferenceRequest.getListener() == null; + } + + private static boolean shouldExecuteImmediately(TimeValue delay) { + return delay.duration() == 0; + } + + public void enqueue(RequestTask task) { + timeOfLastEnqueue.set(Instant.now(clock)); + + if (isShutdown()) { + EsRejectedExecutionException rejected = new EsRejectedExecutionException( + format( + "Failed to enqueue task for inference id [%s] because the request service [%s] has already shutdown", + task.getRequestManager().inferenceEntityId(), + id + ), + true + ); + + task.onRejection(rejected); + return; + } + + var addedToQueue = queue.offer(task); + + if (addedToQueue == false) { + EsRejectedExecutionException rejected = new EsRejectedExecutionException( + format( + "Failed to execute task for inference id [%s] because the request service [%s] queue is full", + task.getRequestManager().inferenceEntityId(), + id + ), + false + ); + + task.onRejection(rejected); + } else if (isShutdown()) { + notifyRequestsOfShutdown(); + } + } + + public synchronized void notifyRequestsOfShutdown() { + assert isShutdown() : "Requests should only be notified if the executor is shutting down"; + + try { + 
List notExecuted = new ArrayList<>(); + queue.drainTo(notExecuted); + + rejectTasks(notExecuted); + } catch (Exception e) { + logger.warn(format("Failed to notify tasks of executor service grouping [%s] shutdown", id), e); + } + } + + private void rejectTasks(List tasks) { + for (var task : tasks) { + rejectTaskForShutdown(task); + } + } + + private void rejectTaskForShutdown(RejectableTask task) { + try { + task.onRejection( + new EsRejectedExecutionException( + format( + "Failed to send request, request service [%s] for inference id [%s] has shutdown prior to executing request", + id, + task.getRequestManager().inferenceEntityId() + ), + true + ) + ); + } catch (Exception e) { + logger.warn( + format( + "Failed to notify request for inference id [%s] of rejection after executor service grouping [%s] shutdown", + task.getRequestManager().inferenceEntityId(), + id + ), + e + ); + } + } + + public int remainingCapacity() { + return queue.remainingCapacity(); + } + + public void close() { + requestExecutorServiceSettings.deregisterQueueCapacityCallback(id); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java index 86825035f2d05..616ef7a40068b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java @@ -10,9 +10,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; -import java.util.ArrayList; +import java.time.Duration; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.function.Consumer; public class RequestExecutorServiceSettings { @@ -29,37 +32,108 @@ public class RequestExecutorServiceSettings { Setting.Property.Dynamic ); + private static final TimeValue DEFAULT_TASK_POLL_FREQUENCY_TIME = TimeValue.timeValueMillis(50); + /** + * Defines how often all the rate limit groups are polled for tasks. Setting this to a very low number could result + * in a busy loop if there are no tasks available to handle. + */ + static final Setting TASK_POLL_FREQUENCY_SETTING = Setting.timeSetting( + "xpack.inference.http.request_executor.task_poll_frequency", + DEFAULT_TASK_POLL_FREQUENCY_TIME, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final TimeValue DEFAULT_RATE_LIMIT_GROUP_CLEANUP_INTERVAL = TimeValue.timeValueDays(1); + /** + * Defines how often a thread will check for rate limit groups that are stale. + */ + static final Setting RATE_LIMIT_GROUP_CLEANUP_INTERVAL_SETTING = Setting.timeSetting( + "xpack.inference.http.request_executor.rate_limit_group_cleanup_interval", + DEFAULT_RATE_LIMIT_GROUP_CLEANUP_INTERVAL, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final TimeValue DEFAULT_RATE_LIMIT_GROUP_STALE_DURATION = TimeValue.timeValueDays(10); + /** + * Defines the amount of time it takes to classify a rate limit group as stale. Once it is classified as stale, + * it can be removed when the cleanup thread executes.
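The stale-duration setting above feeds removeStaleGroupings(), which drops and closes a group once the clock moves past timeOfLastEnqueue plus the stale duration. A small java.time sketch of that comparison, using the 10-day default:

    import java.time.Clock;
    import java.time.Duration;
    import java.time.Instant;

    class StaleCheckSketch {
        // The same comparison removeStaleGroupings() performs for each rate limit group.
        static boolean isStale(Instant timeOfLastEnqueue, Duration staleDuration, Clock clock) {
            return Instant.now(clock).isAfter(timeOfLastEnqueue.plus(staleDuration));
        }

        public static void main(String[] args) {
            Clock clock = Clock.systemUTC();
            Duration staleDuration = Duration.ofDays(10); // default of the stale-duration setting

            Instant active = Instant.now(clock).minus(Duration.ofDays(1));
            Instant idle = Instant.now(clock).minus(Duration.ofDays(11));

            System.out.println(isStale(active, staleDuration, clock)); // false: keep the group
            System.out.println(isStale(idle, staleDuration, clock));   // true: close and remove it
        }
    }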
+ */ + static final Setting RATE_LIMIT_GROUP_STALE_DURATION_SETTING = Setting.timeSetting( + "xpack.inference.http.request_executor.rate_limit_group_stale_duration", + DEFAULT_RATE_LIMIT_GROUP_STALE_DURATION, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + public static List> getSettingsDefinitions() { - return List.of(TASK_QUEUE_CAPACITY_SETTING); + return List.of( + TASK_QUEUE_CAPACITY_SETTING, + TASK_POLL_FREQUENCY_SETTING, + RATE_LIMIT_GROUP_CLEANUP_INTERVAL_SETTING, + RATE_LIMIT_GROUP_STALE_DURATION_SETTING + ); } private volatile int queueCapacity; - private final List> queueCapacityCallbacks = new ArrayList>(); + private volatile TimeValue taskPollFrequency; + private volatile Duration rateLimitGroupStaleDuration; + private final ConcurrentMap> queueCapacityCallbacks = new ConcurrentHashMap<>(); public RequestExecutorServiceSettings(Settings settings, ClusterService clusterService) { queueCapacity = TASK_QUEUE_CAPACITY_SETTING.get(settings); + taskPollFrequency = TASK_POLL_FREQUENCY_SETTING.get(settings); + setRateLimitGroupStaleDuration(RATE_LIMIT_GROUP_STALE_DURATION_SETTING.get(settings)); addSettingsUpdateConsumers(clusterService); } private void addSettingsUpdateConsumers(ClusterService clusterService) { clusterService.getClusterSettings().addSettingsUpdateConsumer(TASK_QUEUE_CAPACITY_SETTING, this::setQueueCapacity); + clusterService.getClusterSettings().addSettingsUpdateConsumer(TASK_POLL_FREQUENCY_SETTING, this::setTaskPollFrequency); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(RATE_LIMIT_GROUP_STALE_DURATION_SETTING, this::setRateLimitGroupStaleDuration); } // default for testing void setQueueCapacity(int queueCapacity) { this.queueCapacity = queueCapacity; - for (var callback : queueCapacityCallbacks) { + for (var callback : queueCapacityCallbacks.values()) { callback.accept(queueCapacity); } } - void registerQueueCapacityCallback(Consumer onChangeCapacityCallback) { - queueCapacityCallbacks.add(onChangeCapacityCallback); + private void setTaskPollFrequency(TimeValue taskPollFrequency) { + this.taskPollFrequency = taskPollFrequency; + } + + private void setRateLimitGroupStaleDuration(TimeValue staleDuration) { + rateLimitGroupStaleDuration = toDuration(staleDuration); + } + + private static Duration toDuration(TimeValue timeValue) { + return Duration.of(timeValue.duration(), timeValue.timeUnit().toChronoUnit()); + } + + void registerQueueCapacityCallback(String id, Consumer onChangeCapacityCallback) { + queueCapacityCallbacks.put(id, onChangeCapacityCallback); + } + + void deregisterQueueCapacityCallback(String id) { + queueCapacityCallbacks.remove(id); } int getQueueCapacity() { return queueCapacity; } + + TimeValue getTaskPollFrequency() { + return taskPollFrequency; + } + + Duration getRateLimitGroupStaleDuration() { + return rateLimitGroupStaleDuration; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java index 7d3cca596f1d0..79ef1b56ad231 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.apache.http.client.protocol.HttpClientContext; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; @@ -21,14 +20,17 @@ - * A contract for constructing a {@link Runnable} to handle sending an inference request to a 3rd party service. + * A contract for building and executing an inference request to a 3rd party service. */ public interface RequestManager extends RateLimitable { - Runnable create( + void execute( @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, - HttpClientContext context, ActionListener listener ); + // TODO For batching we'll add 2 new methods: prepare(query, input, ...), which will allow the individual + // managers to implement their own batching, and + // executePreparedRequest(), which will execute all prepared requests, i.e. send the batch String inferenceEntityId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 738592464232c..7a5f482412289 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -111,7 +111,7 @@ public void onRejection(Exception e) { } @Override - public RequestManager getRequestCreator() { + public RequestManager getRequestManager() { return requestCreator; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java deleted file mode 100644 index 494c77964080f..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.http.sender; - -import org.apache.http.client.protocol.HttpClientContext; - -import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; - -import java.util.Objects; - -/** - * Handles executing a single inference request at a time.
- */ -public class SingleRequestManager { - - protected RetryingHttpSender requestSender; - - public SingleRequestManager(RetryingHttpSender requestSender) { - this.requestSender = Objects.requireNonNull(requestSender); - } - - public void execute(InferenceRequest inferenceRequest, HttpClientContext context) { - if (isNoopRequest(inferenceRequest) || inferenceRequest.hasCompleted()) { - return; - } - - inferenceRequest.getRequestCreator() - .create( - inferenceRequest.getQuery(), - inferenceRequest.getInput(), - requestSender, - inferenceRequest.getRequestCompletedFunction(), - context, - inferenceRequest.getListener() - ) - .run(); - } - - private static boolean isNoopRequest(InferenceRequest inferenceRequest) { - return inferenceRequest.getRequestCreator() == null - || inferenceRequest.getInput() == null - || inferenceRequest.getListener() == null; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 55a7f35710cf6..a4f48510bc0e6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.inference.external.response; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.core.Strings.format; public class XContentUtils { @@ -74,5 +77,26 @@ public static void consumeUntilObjectEnd(XContentParser parser) throws IOExcepti } } + /** + * Parses a single float. + * In the context of the inference API this method is usually used in conjunction + * with {@link XContentParserUtils#parseList(XContentParser, CheckedFunction)} to parse a list of floats of an embedding: + * + *
+ * <pre>{@code
+ *     var floats = XContentParserUtils.parseList(parser, XContentUtils::parseFloat);
+ * }</pre>
+ * + * @param parser + * @return single float + * @throws IOException + */ + public static float parseFloat(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + return parser.floatValue(); + } + private XContentUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioChatCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioChatCompletionResponseEntity.java index 18f5923353960..abf2c4877307c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioChatCompletionResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioChatCompletionResponseEntity.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.external.response.azureaistudio; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -49,7 +48,7 @@ private ChatCompletionResults parseRealtimeEndpointResponse(HttpResult response) moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); while (token != null && token != XContentParser.Token.END_OBJECT) { if (token != XContentParser.Token.FIELD_NAME) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereCompletionResponseEntity.java index 4740c93ea6c03..af58274d22181 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereCompletionResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereCompletionResponseEntity.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.external.response.cohere; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -84,7 +83,7 @@ public static ChatCompletionResults fromResponse(Request request, HttpResult res moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); positionParserAtTokenAfterField(jsonParser, "text", FAILED_TO_FIND_FIELD_TEMPLATE); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java 
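Because parseFloat validates the current token and reads a single value, it composes with XContentParserUtils.parseList exactly as the javadoc shows. A short end-to-end sketch; the parser construction is ordinary xcontent usage, not code from this change:

    import org.elasticsearch.common.xcontent.XContentParserUtils;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentParserConfiguration;
    import org.elasticsearch.xcontent.XContentType;
    import org.elasticsearch.xpack.inference.external.response.XContentUtils;

    import java.io.IOException;
    import java.util.List;

    class ParseFloatSketch {
        static List<Float> parseEmbedding(String json) throws IOException {
            try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) {
                parser.nextToken(); // position the parser on START_ARRAY before delegating
                return XContentParserUtils.parseList(parser, XContentUtils::parseFloat);
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(parseEmbedding("[0.0123, -0.0456, 0.789]"));
        }
    }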
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index f787c6337d646..3fa9635d38e8c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -11,17 +11,17 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.XContentUtils; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; import java.io.IOException; @@ -29,6 +29,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -140,7 +142,7 @@ public static InferenceServiceResults fromResponse(Request request, HttpResult r moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); @@ -183,21 +185,21 @@ private static InferenceServiceResults parseEmbeddingsObject(XContentParser pars } private static InferenceServiceResults parseByteEmbeddingsArray(XContentParser parser) throws IOException { - var embeddingList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); + var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseByteArrayEntry); - return new TextEmbeddingByteResults(embeddingList); + return new InferenceTextEmbeddingByteResults(embeddingList); } - private static TextEmbeddingByteResults.Embedding parseByteArrayEntry(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - List embeddingValuesList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); + private static 
InferenceTextEmbeddingByteResults.InferenceByteEmbedding parseByteArrayEntry(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + List embeddingValuesList = parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingInt8Entry); - return TextEmbeddingByteResults.Embedding.of(embeddingValuesList); + return InferenceTextEmbeddingByteResults.InferenceByteEmbedding.of(embeddingValuesList); } private static Byte parseEmbeddingInt8Entry(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); var parsedByte = parser.shortValue(); checkByteBounds(parsedByte); @@ -211,21 +213,16 @@ private static void checkByteBounds(short value) { } private static InferenceServiceResults parseFloatEmbeddingsArray(XContentParser parser) throws IOException { - var embeddingList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry); + var embeddingList = parseList(parser, CohereEmbeddingsResponseEntity::parseFloatArrayEntry); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } - private static TextEmbeddingResults.Embedding parseFloatArrayEntry(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - List embeddingValuesList = XContentParserUtils.parseList(parser, CohereEmbeddingsResponseEntity::parseEmbeddingFloatEntry); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); - } - - private static Float parseEmbeddingFloatEntry(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); - return parser.floatValue(); + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseFloatArrayEntry(XContentParser parser) + throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private CohereEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java index c9cc71b7fdcda..7f71933676ee0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -23,7 +22,9 @@ import java.io.IOException; +import static 
org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -91,7 +92,7 @@ public static InferenceServiceResults fromResponse(HttpResult response) throws I moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message @@ -109,7 +110,7 @@ public static InferenceServiceResults fromResponse(HttpResult response) throws I } private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); int index = -1; float relevanceScore = -1; String documentText = null; @@ -129,7 +130,7 @@ private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser p break; case "document": parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); do { if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { parser.nextToken(); // move to VALUE_STRING @@ -140,7 +141,7 @@ private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser p // parser should now be at the next FIELD_NAME or END_OBJECT break; default: - XContentParserUtils.throwUnknownField(parser.currentName(), parser); + throwUnknownField(parser.currentName(), parser); } } else { parser.nextToken(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java index 204738f2a2552..543b8e39d85f8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntity.java @@ -8,18 +8,20 @@ package org.elasticsearch.xpack.inference.external.response.googleaistudio; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import 
org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.XContentUtils; import java.io.IOException; import java.util.List; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -68,42 +70,37 @@ public class GoogleAiStudioEmbeddingsResponseEntity { * */ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = XContentParserUtils.parseList( + List embeddingList = parseList( jsonParser, GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingObject ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) + throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); positionParserAtTokenAfterField(parser, "values", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingValuesList = XContentParserUtils.parseList(parser, GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingList); + List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); // parse and discard the rest of the object consumeUntilObjectEnd(parser); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); - } - - private static float parseEmbeddingList(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); - return parser.floatValue(); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private GoogleAiStudioEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 270a981a6998d..f0e729e15b615 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.external.response.huggingface; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -23,6 +22,8 @@ import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; public class HuggingFaceElserResponseEntity { @@ -65,7 +66,7 @@ public static SparseEmbeddingResults fromResponse(Request request, HttpResult re moveToFirstToken(jsonParser); var truncationResults = request.getTruncationInfo(); - List parsedEmbeddings = XContentParserUtils.parseList( + List parsedEmbeddings = parseList( jsonParser, (parser, index) -> HuggingFaceElserResponseEntity.parseExpansionResult(truncationResults, parser, index) ); @@ -81,14 +82,14 @@ public static SparseEmbeddingResults fromResponse(Request request, HttpResult re private static SparseEmbeddingResults.Embedding parseExpansionResult(boolean[] truncationResults, XContentParser parser, int index) throws IOException { XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); List weightedTokens = new ArrayList<>(); token = parser.nextToken(); while (token != null && token != XContentParser.Token.END_OBJECT) { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); var floatToken = parser.nextToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser); + ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser); weightedTokens.add(new WeightedToken(parser.currentName(), parser.floatValue())); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java index a3e06b3c2075a..cdfe36447b88c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java @@ -10,18 +10,20 @@ package org.elasticsearch.xpack.inference.external.response.huggingface; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import 
org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.XContentUtils; import java.io.IOException; import java.util.List; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -33,7 +35,7 @@ public class HuggingFaceEmbeddingsResponseEntity { * Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing * an array of arrays. */ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { @@ -91,13 +93,13 @@ public static TextEmbeddingResults fromResponse(Request request, HttpResult resp * sentence-transformers/all-MiniLM-L6-v2 * sentence-transformers/all-MiniLM-L12-v2 */ - private static TextEmbeddingResults parseArrayFormat(XContentParser parser) throws IOException { - List embeddingList = XContentParserUtils.parseList( + private static InferenceTextEmbeddingFloatResults parseArrayFormat(XContentParser parser) throws IOException { + List embeddingList = parseList( parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } /** @@ -136,28 +138,23 @@ private static TextEmbeddingResults parseArrayFormat(XContentParser parser) thro * intfloat/multilingual-e5-small * sentence-transformers/all-mpnet-base-v2 */ - private static TextEmbeddingResults parseObjectFormat(XContentParser parser) throws IOException { + private static InferenceTextEmbeddingFloatResults parseObjectFormat(XContentParser parser) throws IOException { positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = XContentParserUtils.parseList( + List embeddingList = parseList( parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } - private static TextEmbeddingResults.Embedding parseEmbeddingEntry(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + private static 
InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingEntry(XContentParser parser) + throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - List embeddingValuesList = XContentParserUtils.parseList(parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingList); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); - } - - private static float parseEmbeddingList(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); - return parser.floatValue(); + List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private HuggingFaceEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index 39b97014c3619..ad6df06247080 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -10,18 +10,20 @@ package org.elasticsearch.xpack.inference.external.response.openai; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.response.XContentUtils; import java.io.IOException; import java.util.List; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -72,42 +74,37 @@ public class OpenAiEmbeddingsResponseEntity { * * */ - public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException { + public static InferenceTextEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException { var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, 
jsonParser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingList = XContentParserUtils.parseList( + List embeddingList = parseList( jsonParser, OpenAiEmbeddingsResponseEntity::parseEmbeddingObject ); - return new TextEmbeddingResults(embeddingList); + return new InferenceTextEmbeddingFloatResults(embeddingList); } } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseEmbeddingObject(XContentParser parser) + throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); - List embeddingValuesList = XContentParserUtils.parseList(parser, OpenAiEmbeddingsResponseEntity::parseEmbeddingList); + List embeddingValuesList = parseList(parser, XContentUtils::parseFloat); // parse and discard the rest of the object consumeUntilObjectEnd(parser); - return TextEmbeddingResults.Embedding.of(embeddingValuesList); - } - - private static float parseEmbeddingList(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); - return parser.floatValue(); + return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList); } private OpenAiEmbeddingsResponseEntity() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index f8fab4fadf4ee..7b2e23f2e972d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -17,26 +16,21 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import 
org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -292,60 +286,13 @@ public static List toSemanticTextFieldChunks( ) { List chunks = new ArrayList<>(); for (var result : results) { - if (result instanceof ChunkedSparseEmbeddingResults textExpansionResults) { - for (var chunk : textExpansionResults.getChunkedResults()) { - chunks.add(new Chunk(chunk.matchedText(), toBytesReference(contentType.xContent(), chunk.weightedTokens()))); - } - } else if (result instanceof ChunkedTextEmbeddingResults textEmbeddingResults) { - for (var chunk : textEmbeddingResults.getChunks()) { - chunks.add(new Chunk(chunk.matchedText(), toBytesReference(contentType.xContent(), chunk.embedding()))); - } - } else { - throw new ElasticsearchStatusException( - "Invalid inference results format for field [{}] with inference id [{}], got {}", - RestStatus.BAD_REQUEST, - field, - inferenceId, - result.getWriteableName() - ); + for (Iterator it = result.chunksAsMatchedTextAndByteReference(contentType.xContent()); it + .hasNext();) { + var chunkAsByteReference = it.next(); + chunks.add(new Chunk(chunkAsByteReference.matchedText(), chunkAsByteReference.bytesReference())); } } return chunks; } - /** - * Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}. - */ - private static BytesReference toBytesReference(XContent xContent, double[] value) { - try { - XContentBuilder b = XContentBuilder.builder(xContent); - b.startArray(); - for (double v : value) { - b.value(v); - } - b.endArray(); - return BytesReference.bytes(b); - } catch (IOException exc) { - throw new RuntimeException(exc); - } - } - - /** - * Serialises the {@link WeightedToken} list, according to the provided {@link XContent}, - * into a {@link BytesReference}. 
- */ - private static BytesReference toBytesReference(XContent xContent, List tokens) { - try { - XContentBuilder b = XContentBuilder.builder(xContent); - b.startObject(); - for (var weightedToken : tokens) { - weightedToken.toXContent(b, ToXContent.EMPTY_PARAMS); - } - b.endObject(); - return BytesReference.bytes(b); - } catch (IOException exc) { - throw new RuntimeException(exc); - } - } - } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 6874938f1e118..c2a4907125a31 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.Explicit; @@ -48,7 +49,7 @@ import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import java.io.IOException; @@ -78,7 +79,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final String CONTENT_TYPE = "semantic_text"; public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated()), + (n, c) -> new Builder(n, c.indexVersionCreated(), c::bitSetProducer), List.of(notInMultiFields(CONTENT_TYPE), notFromDynamicTemplates(CONTENT_TYPE)) ); @@ -110,10 +111,10 @@ public static class Builder extends FieldMapper.Builder { private Function inferenceFieldBuilder; - public Builder(String name, IndexVersion indexVersionCreated) { + public Builder(String name, IndexVersion indexVersionCreated, Function bitSetProducer) { super(name); this.indexVersionCreated = indexVersionCreated; - this.inferenceFieldBuilder = c -> createInferenceField(c, indexVersionCreated, modelSettings.get()); + this.inferenceFieldBuilder = c -> createInferenceField(c, indexVersionCreated, modelSettings.get(), bitSetProducer); } public Builder setInferenceId(String id) { @@ -181,7 +182,7 @@ public Iterator iterator() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(simpleName(), fieldType().indexVersionCreated).init(this); + return new Builder(simpleName(), fieldType().indexVersionCreated, fieldType().getChunksField().bitsetProducer()).init(this); } @Override @@ -219,7 +220,11 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio final SemanticTextFieldMapper mapper; if (fieldType().getModelSettings() == null) { context.path().remove(); - Builder builder = (Builder) new Builder(simpleName(), fieldType().indexVersionCreated).init(this); + Builder builder = (Builder) new Builder( + simpleName(), + fieldType().indexVersionCreated, + fieldType().getChunksField().bitsetProducer() + ).init(this); try { mapper = 
builder.setModelSettings(field.inference().modelSettings()) .setInferenceId(field.inference().inferenceId()) @@ -395,12 +400,12 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost yield boolQuery; } case TEXT_EMBEDDING -> { - if (inferenceResults instanceof TextEmbeddingResults == false) { + if (inferenceResults instanceof MlTextEmbeddingResults == false) { throw new IllegalArgumentException( "Field [" + name() + "] expected query inference results to be of type [" - + TextEmbeddingResults.NAME + + MlTextEmbeddingResults.NAME + "]," + " got [" + inferenceResults.getWriteableName() @@ -408,7 +413,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost ); } - TextEmbeddingResults textEmbeddingResults = (TextEmbeddingResults) inferenceResults; + MlTextEmbeddingResults textEmbeddingResults = (MlTextEmbeddingResults) inferenceResults; float[] inference = textEmbeddingResults.getInferenceAsFloat(); if (inference.length != modelSettings.dimensions()) { throw new IllegalArgumentException( @@ -441,18 +446,20 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost private static ObjectMapper createInferenceField( MapperBuilderContext context, IndexVersion indexVersionCreated, - @Nullable SemanticTextField.ModelSettings modelSettings + @Nullable SemanticTextField.ModelSettings modelSettings, + Function bitSetProducer ) { return new ObjectMapper.Builder(INFERENCE_FIELD, Explicit.EXPLICIT_TRUE).dynamic(ObjectMapper.Dynamic.FALSE) - .add(createChunksField(indexVersionCreated, modelSettings)) + .add(createChunksField(indexVersionCreated, modelSettings, bitSetProducer)) .build(context); } private static NestedObjectMapper.Builder createChunksField( IndexVersion indexVersionCreated, - SemanticTextField.ModelSettings modelSettings + @Nullable SemanticTextField.ModelSettings modelSettings, + Function bitSetProducer ) { - NestedObjectMapper.Builder chunksField = new NestedObjectMapper.Builder(CHUNKS_FIELD, indexVersionCreated); + NestedObjectMapper.Builder chunksField = new NestedObjectMapper.Builder(CHUNKS_FIELD, indexVersionCreated, bitSetProducer); chunksField.dynamic(ObjectMapper.Dynamic.FALSE); KeywordFieldMapper.Builder chunkTextField = new KeywordFieldMapper.Builder(CHUNKED_TEXT_FIELD, indexVersionCreated).indexed(false) .docValues(false); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 4d90920f45bac..8f1e28d0d8ee4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -33,7 +33,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; @@ -256,19 +256,20 @@ private static InferenceResults 
validateAndConvertInferenceResults( ); } else if (inferenceResults instanceof WarningInferenceResults warningInferenceResults) { throw new IllegalStateException("Field [" + fieldName + "] query inference warning: " + warningInferenceResults.getWarning()); - } else if (inferenceResults instanceof TextExpansionResults == false && inferenceResults instanceof TextEmbeddingResults == false) { - throw new IllegalArgumentException( - "Field [" - + fieldName - + "] expected query inference results to be of type [" - + TextExpansionResults.NAME - + "] or [" - + TextEmbeddingResults.NAME - + "], got [" - + inferenceResults.getWriteableName() - + "]. Has the inference endpoint configuration changed?" - ); - } + } else if (inferenceResults instanceof TextExpansionResults == false + && inferenceResults instanceof MlTextEmbeddingResults == false) { + throw new IllegalArgumentException( + "Field [" + + fieldName + + "] expected query inference results to be of type [" + + TextExpansionResults.NAME + + "] or [" + + MlTextEmbeddingResults.NAME + + "], got [" + + inferenceResults.getWriteableName() + + "]. Has the inference endpoint configuration changed?" + ); + } return inferenceResults; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceEndpointAction.java index 25280b8acf92d..287e286e95693 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceEndpointAction.java @@ -27,8 +27,8 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteInferenceEndpointAction extends BaseRestHandler { - private String FORCE_DELETE_NAME = "force"; - private String DRY_RUN_NAME = "dry_run"; + private static final String FORCE_DELETE_NAME = "force"; + private static final String DRY_RUN_NAME = "dry_run"; @Override public String getName() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index 24c0ab2cd893e..1c64f505402d8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -31,7 +31,7 @@ public abstract class SenderService implements InferenceService { public SenderService(HttpRequestSender.Factory factory, ServiceComponents serviceComponents) { Objects.requireNonNull(factory); - sender = factory.createSender(name()); + sender = factory.createSender(); this.serviceComponents = Objects.requireNonNull(serviceComponents); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 4b5ec48f99b74..f9aca89969614 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -21,8 +21,8 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.action.InferenceAction; 
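The SemanticQueryBuilder change above is a pure reindentation of the type guard, but it is worth reading as the contract the query path enforces: any result type other than the two the field mapper can consume is rejected early with an actionable message. Condensed into a standalone sketch, with the class context omitted; the InferenceResults import path is inferred, the other names are taken from the hunks above:

    import org.elasticsearch.inference.InferenceResults;
    import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
    import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;

    class QueryInferenceTypeGuard {
        // Fail fast when the inference endpoint returns a result type the
        // semantic_text query path cannot consume.
        static InferenceResults requireSupportedType(InferenceResults inferenceResults, String fieldName) {
            if (inferenceResults instanceof TextExpansionResults == false
                && inferenceResults instanceof MlTextEmbeddingResults == false) {
                throw new IllegalArgumentException(
                    "Field [" + fieldName + "] expected query inference results to be of type ["
                        + TextExpansionResults.NAME + "] or [" + MlTextEmbeddingResults.NAME
                        + "], got [" + inferenceResults.getWriteableName()
                        + "]. Has the inference endpoint configuration changed?"
                );
            }
            return inferenceResults;
        }
    }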
+import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbedding; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; import java.net.URI; @@ -39,7 +39,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; -public class ServiceUtils { +public final class ServiceUtils { /** * Remove the object from the map and cast to the expected type. * If the object cannot be cast to type an ElasticsearchStatusException @@ -606,7 +606,7 @@ public static void getEmbeddingSize(Model model, InferenceService service, Actio new ElasticsearchStatusException( "Could not determine embedding size. " + "Expected a result of type [" - + TextEmbeddingResults.NAME + + InferenceTextEmbeddingFloatResults.NAME + "] got [" + r.getWriteableName() + "]", @@ -624,4 +624,6 @@ public static SecureString apiKey(@Nullable ApiKeySecrets secrets) { // To avoid a possible null pointer throughout the code we'll create a noop api key of an empty array return secrets == null ? new SecureString(new char[0]) : secrets.apiKey(); } + + private ServiceUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index c488eac422401..214c652a97545 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -24,9 +24,9 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.azureaistudio.AzureAiStudioActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -56,7 +56,7 @@ public class AzureAiStudioService extends SenderService { - private static final String NAME = "azureaistudio"; + static final String NAME = "azureaistudio"; public AzureAiStudioService(HttpRequestSender.Factory factory, ServiceComponents serviceComponents) { super(factory, serviceComponents); @@ -116,12 +116,12 @@ private static List translateToChunkedResults( List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults 
error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceSettings.java index 10c57e19b6403..03034ae70c2b6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceSettings.java @@ -44,7 +44,13 @@ protected static BaseAzureAiStudioCommonFields fromMap( ConfigurationParseContext context ) { String target = extractRequiredString(map, TARGET_FIELD, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + AzureAiStudioService.NAME, + context + ); AzureAiStudioEndpointType endpointType = extractRequiredEnum( map, ENDPOINT_TYPE_FIELD, @@ -118,13 +124,13 @@ public void writeTo(StreamOutput out) throws IOException { protected void addXContentFields(XContentBuilder builder, Params params) throws IOException { this.addExposedXContentFields(builder, params); - rateLimitSettings.toXContent(builder, params); } protected void addExposedXContentFields(XContentBuilder builder, Params params) throws IOException { builder.field(TARGET_FIELD, this.target); builder.field(PROVIDER_FIELD, this.provider); builder.field(ENDPOINT_TYPE_FIELD, this.endpointType); + rateLimitSettings.toXContent(builder, params); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index e0e48ab20a86b..bd52bdb165148 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -24,9 +24,9 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -135,7 +135,15 @@ private static AzureOpenAiModel createModel( ); } case COMPLETION -> { - return new 
AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + return new AzureOpenAiCompletionModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); } default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @@ -233,12 +241,12 @@ private static List translateToChunkedResults( List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java index 05cb663453542..c4146b2ba2d30 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; @@ -37,13 +38,14 @@ public AzureOpenAiCompletionModel( String service, Map serviceSettings, Map taskSettings, - @Nullable Map secrets + @Nullable Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings, context), AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), AzureOpenAiSecretSettings.fromMap(secrets) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java index ba503b2bbdc4b..92dc461d9008c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -17,7 +17,9 @@ import org.elasticsearch.inference.ServiceSettings; import 
org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -55,10 +57,10 @@ public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject */ private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); - public static AzureOpenAiCompletionServiceSettings fromMap(Map map) { + public static AzureOpenAiCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); - var settings = fromMap(map, validationException); + var settings = fromMap(map, validationException, context); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -69,12 +71,19 @@ public static AzureOpenAiCompletionServiceSettings fromMap(Map m private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap( Map map, - ValidationException validationException + ValidationException validationException, + ConfigurationParseContext context ) { String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException); String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + AzureOpenAiService.NAME, + context + ); return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings); } @@ -137,7 +146,6 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; @@ -148,6 +156,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil builder.field(RESOURCE_NAME, resourceName); builder.field(DEPLOYMENT_ID, deploymentId); builder.field(API_VERSION, apiVersion); + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java index 33bb0fdb07c58..1c426815a83c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import 
org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -90,7 +91,13 @@ private static CommonFields fromMap( Integer dims = removeAsType(map, DIMENSIONS, Integer.class); Integer maxTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + AzureOpenAiService.NAME, + context + ); Boolean dimensionsSetByUser = extractOptionalBoolean(map, DIMENSIONS_SET_BY_USER, validationException); @@ -245,8 +252,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - - rateLimitSettings.toXContent(builder, params); builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); builder.endObject(); @@ -268,6 +273,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (similarity != null) { builder.field(SIMILARITY, similarity); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 11dbf673ab7bd..4c673026d7efb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -51,6 +51,11 @@ public class CohereService extends SenderService { public static final String NAME = "cohere"; + // TODO Batching - We'll instantiate a batching class within the services that want to support it and pass it through to + // the Cohere*RequestManager via the CohereActionCreator class + // The reason it needs to be done here is that the batching logic needs to hold state but the *RequestManagers are instantiated + // on every request + public CohereService(HttpRequestSender.Factory factory, ServiceComponents serviceComponents) { super(factory, serviceComponents); } @@ -131,7 +136,15 @@ private static CohereModel createModel( context ); case RERANK -> new CohereRerankModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings, context); - case COMPLETION -> new CohereCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + case COMPLETION -> new CohereCompletionModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 
b23f6f188d8c5..d477a8c5a5f55 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -58,7 +58,13 @@ public static CohereServiceSettings fromMap(Map map, Configurati Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); String oldModelId = extractOptionalString(map, OLD_MODEL_ID_FIELD, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + CohereService.NAME, + context + ); String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -173,10 +179,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public XContentBuilder toXContentFragment(XContentBuilder builder, Params params) throws IOException { - toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); - - return builder; + return toXContentFragmentOfExposedFields(builder, params); } @Override @@ -196,6 +199,7 @@ public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder if (modelId != null) { builder.field(MODEL_ID, modelId); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModel.java index 761081d4d723c..bec4f5a0b5c85 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModel.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.cohere.CohereModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -30,13 +31,14 @@ public CohereCompletionModel( String service, Map serviceSettings, Map taskSettings, - @Nullable Map secrets + @Nullable Map secrets, + ConfigurationParseContext context ) { this( modelId, taskType, service, - CohereCompletionServiceSettings.fromMap(serviceSettings), + CohereCompletionServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, DefaultSecretSettings.fromMap(secrets) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java index 2a22f6333f1a2..ba9e81b461f9f 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java @@ -16,7 +16,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.cohere.CohereRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -39,12 +41,18 @@ public class CohereCompletionServiceSettings extends FilteredXContentObject impl // 10K requests per minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(10_000); - public static CohereCompletionServiceSettings fromMap(Map map) { + public static CohereCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + CohereService.NAME, + context + ); String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); if (validationException.validationErrors().isEmpty() == false) { @@ -94,7 +102,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; @@ -127,6 +134,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (modelId != null) { builder.field(MODEL_ID, modelId); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 408e3ec1ccbca..0052607ce325b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -28,18 +28,21 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import 
org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; -import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; @@ -239,7 +242,7 @@ public void infer( if (TaskType.TEXT_EMBEDDING.equals(taskType)) { inferTextEmbedding(model, input, inputType, timeout, listener); } else if (TaskType.RERANK.equals(taskType)) { - inferRerank(model, query, input, timeout, taskSettings, listener); + inferRerank(model, query, input, inputType, timeout, taskSettings, listener); } else { throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); } @@ -247,22 +250,26 @@ public void infer( public void inferTextEmbedding( Model model, - List input, + List inputs, InputType inputType, TimeValue timeout, ActionListener listener ) { - var request = InferTrainedModelDeploymentAction.Request.forTextInput( + var request = buildInferenceRequest( model.getConfigurations().getInferenceEntityId(), TextEmbeddingConfigUpdate.EMPTY_INSTANCE, - input, - timeout + inputs, + inputType, + timeout, + false ); client.execute( - InferTrainedModelDeploymentAction.INSTANCE, + InferModelAction.INSTANCE, request, - listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(TextEmbeddingResults.of(inferenceResult.getResults()))) + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults())) + ) ); } @@ -270,16 +277,18 @@ public void inferRerank( Model model, String query, List inputs, + InputType inputType, TimeValue timeout, Map requestTaskSettings, ActionListener listener ) { - var config = new TextSimilarityConfigUpdate(query); - var request = InferTrainedModelDeploymentAction.Request.forTextInput( + var request = buildInferenceRequest( model.getConfigurations().getInferenceEntityId(), - config, + new TextSimilarityConfigUpdate(query), inputs, - timeout + inputType, + timeout, + false ); var modelSettings = (CustomElandRerankTaskSettings) model.getTaskSettings(); @@ -289,10 +298,12 @@ public void inferRerank( Function inputSupplier = returnDocs == Boolean.TRUE ? 
inputs::get : i -> null; client.execute( - InferTrainedModelDeploymentAction.INSTANCE, + InferModelAction.INSTANCE, request, listener.delegateFailureAndWrap( - (l, inferenceResult) -> l.onResponse(textSimilarityResultsToRankedDocs(inferenceResult.getResults(), inputSupplier)) + (l, inferenceResult) -> l.onResponse( + textSimilarityResultsToRankedDocs(inferenceResult.getInferenceResults(), inputSupplier) + ) ) ); } @@ -331,18 +342,21 @@ public void chunkedInfer( ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) : new TokenizationConfigUpdate(null, null); - var request = InferTrainedModelDeploymentAction.Request.forTextInput( + var request = buildInferenceRequest( model.getConfigurations().getInferenceEntityId(), configUpdate, input, - timeout + inputType, + timeout, + true ); - request.setChunkResults(true); client.execute( - InferTrainedModelDeploymentAction.INSTANCE, + InferModelAction.INSTANCE, request, - listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(translateToChunkedResults(inferenceResult.getResults()))) + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(translateToChunkedResults(inferenceResult.getInferenceResults())) + ) ); } @@ -357,12 +371,12 @@ private static List translateToChunkedResults(Li } private static ChunkedInferenceServiceResults translateToChunkedResult(InferenceResults inferenceResult) { - if (inferenceResult instanceof org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults mlChunkedResult) { - return ChunkedTextEmbeddingResults.ofMlResult(mlChunkedResult); + if (inferenceResult instanceof MlChunkedTextEmbeddingFloatResults mlChunkedResult) { + return InferenceChunkedTextEmbeddingFloatResults.ofMlResults(mlChunkedResult); } else if (inferenceResult instanceof ErrorInferenceResults error) { return new ErrorChunkedInferenceResults(error.getException()); } else { - throw createInvalidChunkedResultException(inferenceResult.getWriteableName()); + throw createInvalidChunkedResultException(MlChunkedTextEmbeddingFloatResults.NAME, inferenceResult.getWriteableName()); } } @@ -532,4 +546,20 @@ private RankedDocsResults textSimilarityResultsToRankedDocs( return new RankedDocsResults(rankings); } + public static InferModelAction.Request buildInferenceRequest( + String id, + InferenceConfigUpdate update, + List inputs, + InputType inputType, + TimeValue timeout, + boolean chunk + ) { + var request = InferModelAction.Request.forTextInput(id, update, inputs, true, timeout); + request.setPrefixType( + InputType.SEARCH == inputType ? 
TrainedModelPrefixStrings.PrefixType.SEARCH : TrainedModelPrefixStrings.PrefixType.INGEST + ); + request.setHighPriority(InputType.SEARCH == inputType); + request.setChunked(chunk); + return request; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index 01829bfee5241..6e7c177861cdd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -29,19 +29,19 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; -import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -58,6 +58,7 @@ import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.buildInferenceRequest; public class ElserInternalService implements InferenceService { @@ -259,7 +260,7 @@ public void stop(String inferenceEntityId, ActionListener listener) { public void infer( Model model, @Nullable String query, - List input, + List inputs, Map taskSettings, InputType inputType, TimeValue timeout, @@ -274,16 +275,21 @@ public void infer( return; } - var request = InferTrainedModelDeploymentAction.Request.forTextInput( + var request = buildInferenceRequest( model.getConfigurations().getInferenceEntityId(), TextExpansionConfigUpdate.EMPTY_UPDATE, - input, - timeout + inputs, + inputType, + timeout, + false // chunk ); + client.execute( - InferTrainedModelDeploymentAction.INSTANCE, + InferModelAction.INSTANCE, request, - 
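[Note on buildInferenceRequest: the static helper just completed above replaces the scattered InferTrainedModelDeploymentAction.Request.forTextInput calls, and both ElasticsearchInternalService and ElserInternalService now execute InferModelAction with it. Besides building the request, it sets the prefix type and high priority for SEARCH input and toggles chunked results. A representative call site, mirroring the inferTextEmbedding hunk with stripped type parameters restored:]

    var request = ElasticsearchInternalService.buildInferenceRequest(
        model.getConfigurations().getInferenceEntityId(),
        TextEmbeddingConfigUpdate.EMPTY_INSTANCE,
        inputs,      // List<String>
        inputType,
        timeout,
        false        // chunk: plain embedding call, no chunked results
    );

    client.execute(
        InferModelAction.INSTANCE,
        request,
        listener.delegateFailureAndWrap(
            (l, inferenceResult) -> l.onResponse(InferenceTextEmbeddingFloatResults.of(inferenceResult.getInferenceResults()))
        )
    );
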
listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getResults()))) + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getInferenceResults())) + ) ); } @@ -303,7 +309,7 @@ public void chunkedInfer( public void chunkedInfer( Model model, @Nullable String query, - List input, + List inputs, Map taskSettings, InputType inputType, @Nullable ChunkingOptions chunkingOptions, @@ -321,18 +327,21 @@ public void chunkedInfer( ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) : new TokenizationConfigUpdate(null, null); - var request = InferTrainedModelDeploymentAction.Request.forTextInput( + var request = buildInferenceRequest( model.getConfigurations().getInferenceEntityId(), configUpdate, - input, - timeout + inputs, + inputType, + timeout, + true // chunk ); - request.setChunkResults(true); client.execute( - InferTrainedModelDeploymentAction.INSTANCE, + InferModelAction.INSTANCE, request, - listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(translateChunkedResults(inferenceResult.getResults()))) + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(translateChunkedResults(inferenceResult.getInferenceResults())) + ) ); } @@ -407,15 +416,15 @@ private List translateChunkedResults(List(); for (var inferenceResult : inferenceResults) { - if (inferenceResult instanceof ChunkedTextExpansionResults mlChunkedResult) { - translated.add(ChunkedSparseEmbeddingResults.ofMlResult(mlChunkedResult)); + if (inferenceResult instanceof InferenceChunkedTextExpansionResults mlChunkedResult) { + translated.add(InferenceChunkedSparseEmbeddingResults.ofMlResult(mlChunkedResult)); } else if (inferenceResult instanceof ErrorInferenceResults error) { translated.add(new ErrorChunkedInferenceResults(error.getException())); } else { throw new ElasticsearchStatusException( "Expected a chunked inference [{}] received [{}]", RestStatus.INTERNAL_SERVER_ERROR, - ChunkedTextExpansionResults.NAME, + InferenceChunkedTextExpansionResults.NAME, inferenceResult.getWriteableName() ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index f8720448b0f4f..cfa8566495143 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -108,7 +108,8 @@ private static GoogleAiStudioModel createModel( NAME, serviceSettings, taskSettings, - secretSettings + secretSettings, + context ); case TEXT_EMBEDDING -> new GoogleAiStudioEmbeddingsModel( inferenceEntityId, @@ -116,7 +117,8 @@ private static GoogleAiStudioModel createModel( NAME, serviceSettings, taskSettings, - secretSettings + secretSettings, + context ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java index eafb0c372202c..8fa2ac0148716 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModel.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionVisitor; import org.elasticsearch.xpack.inference.external.request.googleaistudio.GoogleAiStudioUtils; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -37,13 +38,14 @@ public GoogleAiStudioCompletionModel( String service, Map serviceSettings, Map taskSettings, - Map secrets + Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - GoogleAiStudioCompletionServiceSettings.fromMap(serviceSettings), + GoogleAiStudioCompletionServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, DefaultSecretSettings.fromMap(secrets) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java index f8f343be8eb4c..7c0b812ee213b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettings.java @@ -16,7 +16,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -40,11 +42,17 @@ public class GoogleAiStudioCompletionServiceSettings extends FilteredXContentObj */ private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(360); - public static GoogleAiStudioCompletionServiceSettings fromMap(Map map) { + public static GoogleAiStudioCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); String model = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + GoogleAiStudioService.NAME, + context + ); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -82,7 +90,6 @@ public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; @@ -107,6 +114,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(MODEL_ID, modelId); + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java index ad106797de51b..af19e26f3e97a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsModel.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionVisitor; import org.elasticsearch.xpack.inference.external.request.googleaistudio.GoogleAiStudioUtils; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -37,13 +38,14 @@ public GoogleAiStudioEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, - Map secrets + Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - GoogleAiStudioEmbeddingsServiceSettings.fromMap(serviceSettings), + GoogleAiStudioEmbeddingsServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, DefaultSecretSettings.fromMap(secrets) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java index 07d07dc533f06..7608f48d0638d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettings.java @@ -18,7 +18,9 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -47,7 +49,7 @@ public class GoogleAiStudioEmbeddingsServiceSettings extends FilteredXContentObj */ private static final 
RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(360); - public static GoogleAiStudioEmbeddingsServiceSettings fromMap(Map map) { + public static GoogleAiStudioEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); String model = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -59,7 +61,13 @@ public static GoogleAiStudioEmbeddingsServiceSettings fromMap(Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - return createModel(inferenceEntityId, taskType, serviceSettingsMap, null, parsePersistedConfigErrorMsg(inferenceEntityId, name())); + return createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + null, + parsePersistedConfigErrorMsg(inferenceEntityId, name()), + ConfigurationParseContext.PERSISTENT + ); } protected abstract HuggingFaceModel createModel( @@ -105,7 +116,8 @@ protected abstract HuggingFaceModel createModel( TaskType taskType, Map serviceSettings, Map secretSettings, - String failureMessage + String failureMessage, + ConfigurationParseContext context ); @Override @@ -164,14 +176,19 @@ private static List translateToChunkedResults( List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof SparseEmbeddingResults sparseEmbeddingResults) { - return ChunkedSparseEmbeddingResults.of(inputs, sparseEmbeddingResults); + return InferenceChunkedSparseEmbeddingResults.listOf(inputs, sparseEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + String expectedClasses = Strings.format( + "One of [%s,%s]", + InferenceTextEmbeddingFloatResults.class.getSimpleName(), + SparseEmbeddingResults.class.getSimpleName() + ); + throw createInvalidChunkedResultException(expectedClasses, inferenceResults.getWriteableName()); } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index d8c383d2b4a67..c0438b3759a65 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; @@ -36,11 +37,19 @@ protected HuggingFaceModel createModel( TaskType 
taskType, Map serviceSettings, @Nullable Map secretSettings, - String failureMessage + String failureMessage, + ConfigurationParseContext context ) { return switch (taskType) { - case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); - case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); + case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + secretSettings, + context + ); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings, context); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index af2c433663ac4..fc31b1e518dd9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -43,14 +44,20 @@ public class HuggingFaceServiceSettings extends FilteredXContentObject implement // 3000 requests per minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(3000); - public static HuggingFaceServiceSettings fromMap(Map map) { + public static HuggingFaceServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); var uri = extractUri(map, URL, validationException); SimilarityMeasure similarityMeasure = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); Integer dims = removeAsType(map, DIMENSIONS, Integer.class); Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + HuggingFaceService.NAME, + context + ); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -119,7 +126,6 @@ public HuggingFaceServiceSettings(StreamInput in) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; } @@ -136,6 +142,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 9010571ea2e55..8132089d8dc99 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -13,6 +13,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -24,13 +25,14 @@ public HuggingFaceElserModel( TaskType taskType, String service, Map serviceSettings, - @Nullable Map secrets + @Nullable Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - HuggingFaceElserServiceSettings.fromMap(serviceSettings), + HuggingFaceElserServiceSettings.fromMap(serviceSettings, context), DefaultSecretSettings.fromMap(secrets) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 2587b2737e164..d3099e96ee7c1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -14,6 +14,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceBaseService; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; @@ -38,10 +39,11 @@ protected HuggingFaceModel createModel( TaskType taskType, Map serviceSettings, @Nullable Map secretSettings, - String failureMessage + String failureMessage, + ConfigurationParseContext context ) { return switch (taskType) { - case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(inferenceEntityId, taskType, NAME, serviceSettings, secretSettings, context); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index 1f337de450ef9..8b4bd61649de0 100644 --- 
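[Note on the context-threading pattern: the same change repeats for every provider in this patch; createModel passes the ConfigurationParseContext into the model constructor, which forwards it to the settings parser. Spelled out once below with the stripped generics restored, using the shapes from the HuggingFaceElserModel hunk above:]

    public HuggingFaceElserModel(
        String inferenceEntityId,
        TaskType taskType,
        String service,
        Map<String, Object> serviceSettings,
        @Nullable Map<String, Object> secrets,
        ConfigurationParseContext context   // new parameter threaded in from createModel(...)
    ) {
        this(
            inferenceEntityId,
            taskType,
            service,
            HuggingFaceElserServiceSettings.fromMap(serviceSettings, context), // context reaches the parser
            DefaultSecretSettings.fromMap(secrets)
        );
    }
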
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -15,7 +15,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -40,10 +42,16 @@ public class HuggingFaceElserServiceSettings extends FilteredXContentObject // 3000 requests per minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(3000); - public static HuggingFaceElserServiceSettings fromMap(Map map) { + public static HuggingFaceElserServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); var uri = extractUri(map, URL, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + HuggingFaceService.NAME, + context + ); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -93,7 +101,6 @@ public int maxInputTokens() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; @@ -103,6 +110,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(URL, uri.toString()); builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index 1cee26558b490..fedd6380d035f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -13,6 +13,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import 
org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -25,13 +26,14 @@ public HuggingFaceEmbeddingsModel( TaskType taskType, String service, Map serviceSettings, - @Nullable Map secrets + @Nullable Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - HuggingFaceServiceSettings.fromMap(serviceSettings), + HuggingFaceServiceSettings.fromMap(serviceSettings, context), DefaultSecretSettings.fromMap(secrets) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 7ddb71d001e8c..ee0cec1d75846 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -22,9 +22,9 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.mistral.MistralActionCreator; @@ -117,12 +117,12 @@ private static List translateToChunkedResults( List inputs, InferenceServiceResults inferenceResults ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); + if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) { + return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults); } else if (inferenceResults instanceof ErrorInferenceResults error) { return List.of(new ErrorChunkedInferenceResults(error.getException())); } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + throw createInvalidChunkedResultException(InferenceChunkedTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettings.java index d2ea8ccbd18bd..62d06a4e0029c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettings.java @@ -18,6 +18,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.mistral.MistralService; import 
org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -59,7 +60,13 @@ public static MistralEmbeddingsServiceSettings fromMap(Map map, ModelConfigurations.SERVICE_SETTINGS, validationException ); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + MistralService.NAME, + context + ); Integer dims = removeAsType(map, DIMENSIONS, Integer.class); if (validationException.validationErrors().isEmpty() == false) { @@ -141,7 +148,6 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); this.toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; } @@ -159,6 +165,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (this.maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, this.maxInputTokens); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 84dfac8903678..04b6ae94d6b53 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -138,7 +138,8 @@ private static OpenAiModel createModel( NAME, serviceSettings, taskSettings, - secretSettings + secretSettings, + context ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java index b1b670c0911f5..7ca93684bc680 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java @@ -13,6 +13,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -35,13 +36,14 @@ public OpenAiChatCompletionModel( String service, Map serviceSettings, Map taskSettings, - @Nullable Map secrets + @Nullable Map secrets, + ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, - OpenAiChatCompletionServiceSettings.fromMap(serviceSettings), + OpenAiChatCompletionServiceSettings.fromMap(serviceSettings, context), OpenAiChatCompletionTaskSettings.fromMap(taskSettings), DefaultSecretSettings.fromMap(secrets) 
); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 5105bb59e048f..04f77da1b1463 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -16,7 +16,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -47,7 +49,7 @@ public class OpenAiChatCompletionServiceSettings extends FilteredXContentObject // 500 requests per minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(500); - public static OpenAiChatCompletionServiceSettings fromMap(Map map) { + public static OpenAiChatCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -58,7 +60,13 @@ public static OpenAiChatCompletionServiceSettings fromMap(Map ma Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + OpenAiService.NAME, + context + ); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -142,7 +150,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); builder.endObject(); return builder; @@ -163,6 +170,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index fc479009d3334..080251bf1ba3a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -66,7 +67,7 @@ private static OpenAiEmbeddingsServiceSettings fromPersistentMap(Map map) { ValidationException validationException = new ValidationException(); - var commonFields = fromMap(map, validationException); + var commonFields = fromMap(map, validationException, ConfigurationParseContext.REQUEST); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -89,7 +90,11 @@ private static OpenAiEmbeddingsServiceSettings fromRequestMap(Map map, ValidationException validationException) { + private static CommonFields fromMap( + Map map, + ValidationException validationException, + ConfigurationParseContext context + ) { String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); String organizationId = extractOptionalString(map, ORGANIZATION, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -98,7 +103,13 @@ private static CommonFields fromMap(Map map, ValidationException Integer dims = removeAsType(map, DIMENSIONS, Integer.class); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); - RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + OpenAiService.NAME, + context + ); return new CommonFields(modelId, uri, organizationId, similarity, maxInputTokens, dims, rateLimitSettings); } @@ -258,7 +269,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); - rateLimitSettings.toXContent(builder, params); if (dimensionsSetByUser != null) { builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); @@ -286,6 +296,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } + rateLimitSettings.toXContent(builder, params); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java index cfc375a525dd6..f593ca4e0c603 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import java.io.IOException; import java.util.Map; @@ -21,19 +22,29 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveLong; import 
static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; public class RateLimitSettings implements Writeable, ToXContentFragment { - public static final String FIELD_NAME = "rate_limit"; public static final String REQUESTS_PER_MINUTE_FIELD = "requests_per_minute"; private final long requestsPerTimeUnit; private final TimeUnit timeUnit; - public static RateLimitSettings of(Map map, RateLimitSettings defaultValue, ValidationException validationException) { + public static RateLimitSettings of( + Map map, + RateLimitSettings defaultValue, + ValidationException validationException, + String serviceName, + ConfigurationParseContext context + ) { Map settings = removeFromMapOrDefaultEmpty(map, FIELD_NAME); var requestsPerMinute = extractOptionalPositiveLong(settings, REQUESTS_PER_MINUTE_FIELD, FIELD_NAME, validationException); + if (ConfigurationParseContext.isRequestContext(context)) { + throwIfNotEmptyMap(settings, serviceName); + } + return requestsPerMinute == null ? defaultValue : new RateLimitSettings(requestsPerMinute); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java index 428dbca892438..cd14d9e545079 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; -import org.elasticsearch.xpack.inference.results.LegacyTextEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.results.LegacyMlTextEmbeddingResultsTests; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; @@ -44,7 +44,7 @@ protected Writeable.Reader instanceReader() { protected InferenceAction.Response createTestInstance() { var result = switch (randomIntBetween(0, 2)) { case 0 -> TextEmbeddingResultsTests.createRandomResults(); - case 1 -> LegacyTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); + case 1 -> LegacyMlTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); default -> SparseEmbeddingResultsTests.createRandomResults(); }; @@ -73,7 +73,7 @@ public void testSerializesInferenceServiceResultsAddedVersion() throws IOExcepti } public void testSerializesOpenAiAddedVersion_UsingLegacyTextEmbeddingResult() throws IOException { - var embeddingResults = LegacyTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); + var embeddingResults = LegacyMlTextEmbeddingResultsTests.createRandomResults().transformToTextEmbeddingResults(); var instance = new InferenceAction.Response(embeddingResults); var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), V_8_12_0); assertOnBWCObject(copy, instance, V_8_12_0); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java 
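[Note on RateLimitSettings.of: all of the serviceName/context plumbing in the earlier hunks lands here. The method now rejects unknown keys under rate_limit, but only when parsing a user request; persisted configurations keep the old lenient behavior. The finished method as it reads after this hunk, with the generics the extraction dropped; isRequestContext comes from the ConfigurationParseContext referenced throughout this patch:]

    public static RateLimitSettings of(
        Map<String, Object> map,
        RateLimitSettings defaultValue,
        ValidationException validationException,
        String serviceName,
        ConfigurationParseContext context
    ) {
        Map<String, Object> settings = removeFromMapOrDefaultEmpty(map, FIELD_NAME);
        var requestsPerMinute = extractOptionalPositiveLong(settings, REQUESTS_PER_MINUTE_FIELD, FIELD_NAME, validationException);

        // Strict only at request time: leftover keys under "rate_limit" are an
        // error in a create/update request but tolerated in persisted configs.
        if (ConfigurationParseContext.isRequestContext(context)) {
            throwIfNotEmptyMap(settings, serviceName);
        }

        return requestsPerMinute == null ? defaultValue : new RateLimitSettings(requestsPerMinute);
    }
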
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index c87faa2b52cc8..d501c9a65d80e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -33,14 +33,15 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.inference.model.TestModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.junit.After; import org.junit.Before; import org.mockito.stubbing.Answer; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -323,7 +324,7 @@ private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool private static BulkItemRequest[] randomBulkItemRequest( Map modelMap, Map fieldInferenceMap - ) { + ) throws IOException { Map docMap = new LinkedHashMap<>(); Map expectedDocMap = new LinkedHashMap<>(); XContentType requestContentType = randomFrom(XContentType.values()); @@ -376,7 +377,7 @@ public static StaticModel createRandomInstance() { } ChunkedInferenceServiceResults getResults(String text) { - return resultMap.getOrDefault(text, new ChunkedSparseEmbeddingResults(List.of())); + return resultMap.getOrDefault(text, new InferenceChunkedSparseEmbeddingResults(List.of())); } void putResult(String text, ChunkedInferenceServiceResults result) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java index 164f975cc464f..66079a00ee3b8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import java.util.ArrayList; import java.util.List; @@ -177,34 +177,34 @@ public void testMergingListener() { // 4 inputs in 2 batches { - var embeddings = new ArrayList(); + var embeddings = new ArrayList(); for (int i = 0; i < batchSize; i++) { - embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() })); + embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() })); } - 
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java
index 164f975cc464f..66079a00ee3b8 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java
@@ -10,9 +10,9 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.inference.ChunkedInferenceServiceResults;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults;
 import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults;
-import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -177,34 +177,34 @@ public void testMergingListener() {
         // 4 inputs in 2 batches
         {
-            var embeddings = new ArrayList<TextEmbeddingResults.Embedding>();
+            var embeddings = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
             for (int i = 0; i < batchSize; i++) {
-                embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() }));
+                embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() }));
             }
-            batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings));
+            batches.get(0).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings));
         }
         {
-            var embeddings = new ArrayList<TextEmbeddingResults.Embedding>();
+            var embeddings = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
             for (int i = 0; i < 4; i++) { // 4 requests in the 2nd batch
-                embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() }));
+                embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() }));
             }
-            batches.get(1).listener().onResponse(new TextEmbeddingResults(embeddings));
+            batches.get(1).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings));
         }
 
         assertNotNull(finalListener.results);
         assertThat(finalListener.results, hasSize(4));
         {
             var chunkedResult = finalListener.results.get(0);
-            assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class));
-            var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult;
+            assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class));
+            var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult;
             assertThat(chunkedFloatResult.chunks(), hasSize(1));
             assertEquals("1st small", chunkedFloatResult.chunks().get(0).matchedText());
         }
         { // this is the large input split in multiple chunks
             var chunkedResult = finalListener.results.get(1);
-            assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class));
-            var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult;
+            assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class));
+            var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult;
             assertThat(chunkedFloatResult.chunks(), hasSize(6));
             assertThat(chunkedFloatResult.chunks().get(0).matchedText(), startsWith("passage_input0 "));
             assertThat(chunkedFloatResult.chunks().get(1).matchedText(), startsWith(" passage_input20 "));
@@ -215,15 +215,15 @@ public void testMergingListener() {
         }
         {
             var chunkedResult = finalListener.results.get(2);
-            assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class));
-            var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult;
+            assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class));
+            var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult;
             assertThat(chunkedFloatResult.chunks(), hasSize(1));
             assertEquals("2nd small", chunkedFloatResult.chunks().get(0).matchedText());
         }
         {
             var chunkedResult = finalListener.results.get(3);
-            assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class));
-            var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult;
+            assertThat(chunkedResult, instanceOf(InferenceChunkedTextEmbeddingFloatResults.class));
+            var chunkedFloatResult = (InferenceChunkedTextEmbeddingFloatResults) chunkedResult;
             assertThat(chunkedFloatResult.chunks(), hasSize(1));
             assertEquals("3rd small", chunkedFloatResult.chunks().get(0).matchedText());
         }
@@ -251,10 +251,10 @@ public void onFailure(Exception e) {
         var batches = new EmbeddingRequestChunker(inputs, 10, 100, 0).batchRequestsWithListeners(listener);
         assertThat(batches, hasSize(1));
 
-        var embeddings = new ArrayList<TextEmbeddingResults.Embedding>();
-        embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() }));
-        embeddings.add(new TextEmbeddingResults.Embedding(new float[] { randomFloat() }));
-        batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings));
+        var embeddings = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
+        embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() }));
+        embeddings.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { randomFloat() }));
+        batches.get(0).listener().onResponse(new InferenceTextEmbeddingFloatResults(embeddings));
 
         assertEquals("Error the number of embedding responses [2] does not equal the number of requests [3]", failureMessage.get());
     }
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureaistudio/AzureAiStudioActionAndCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureaistudio/AzureAiStudioActionAndCreatorTests.java
index 88d408d309a7b..8792234102a94 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureaistudio/AzureAiStudioActionAndCreatorTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureaistudio/AzureAiStudioActionAndCreatorTests.java
@@ -44,6 +44,7 @@
 import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap;
 import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl;
 import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields;
+import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender;
 import static org.elasticsearch.xpack.inference.external.request.azureaistudio.AzureAiStudioRequestFields.API_KEY_HEADER;
 import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion;
 import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat;
@@ -92,7 +93,7 @@ public void testEmbeddingsRequestAction() throws IOException {
             TruncatorTests.createTruncator()
         );
 
-        try (var sender = senderFactory.createSender("test_service")) {
+        try (var sender = createSender(senderFactory)) {
             sender.start();
 
             webServer.enqueue(new MockResponse().setResponseCode(200).setBody(testEmbeddingsTokenResponseJson));
@@ -141,7 +142,7 @@ public void testChatCompletionRequestAction() throws IOException {
             TruncatorTests.createTruncator()
         );
 
-        try (var sender = senderFactory.createSender("test_service")) {
+        try (var sender = createSender(senderFactory)) {
             sender.start();
 
             webServer.enqueue(new MockResponse().setResponseCode(200).setBody(testCompletionTokenResponseJson));
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java
index 0a2a00143b205..72124a6221254 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java
@@ -44,6 +44,7 @@
 import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap;
 import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl;
 import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields;
+import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender;
import
static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; @@ -82,7 +83,7 @@ public void shutdown() throws IOException { public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -132,7 +133,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -183,7 +184,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat // timeout as zero for no retries var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -237,7 +238,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); // note - there is no complete documentation on Azure's error messages @@ -313,7 +314,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); // note - there is no complete documentation on Azure's error messages @@ -389,7 +390,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC public void testExecute_TruncatesInputBeforeSending() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -440,7 +441,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -498,7 +499,7 @@ public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOExcept public void 
testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -554,7 +555,7 @@ public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat( // timeout as zero for no retries var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); // "choices" missing diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java index 96127841c17a8..7d52616402405 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -44,6 +44,7 @@ import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; import static org.hamcrest.Matchers.hasSize; @@ -77,7 +78,7 @@ public void shutdown() throws IOException { public void testExecute_ReturnsSuccessfulResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsActionTests.java index 89cc847321796..4cc7b7c0d9cfc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsActionTests.java @@ -43,6 +43,7 @@ import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static 
org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests.createModel; @@ -81,7 +82,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { mockClusterServiceEmpty() ); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java index 9b0371ad51f8c..9ec34e7d8e5c5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java @@ -42,6 +42,7 @@ import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -73,7 +74,7 @@ public void shutdown() throws IOException { public void testCreate_CohereEmbeddingsModel() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -154,7 +155,7 @@ public void testCreate_CohereEmbeddingsModel() throws IOException { public void testCreate_CohereCompletionModel_WithModelSpecified() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -214,7 +215,7 @@ public void testCreate_CohereCompletionModel_WithModelSpecified() throws IOExcep public void testCreate_CohereCompletionModel_WithoutModelSpecified() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereCompletionActionTests.java index 12c3d132d1244..0a604980f6c83 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereCompletionActionTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereCompletionActionTests.java @@ -77,7 +77,7 @@ public void shutdown() throws IOException { public void testExecute_ReturnsSuccessfulResponse_WithModelSpecified() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = HttpRequestSenderTests.createSenderWithSingleRequestManager(senderFactory, "test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -138,7 +138,7 @@ public void testExecute_ReturnsSuccessfulResponse_WithModelSpecified() throws IO public void testExecute_ReturnsSuccessfulResponse_WithoutModelSpecified() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = HttpRequestSenderTests.createSenderWithSingleRequestManager(senderFactory, "test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -290,7 +290,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java index dbc97fa2e13d8..9cf6de27b93bc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -81,7 +81,7 @@ public void shutdown() throws IOException { public void testExecute_ReturnsSuccessfulResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = HttpRequestSenderTests.createSenderWithSingleRequestManager(senderFactory, "test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -162,7 +162,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { public void testExecute_ReturnsSuccessfulResponse_ForInt8ResponseType() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = HttpRequestSenderTests.createSenderWithSingleRequestManager(senderFactory, "test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java index 09ef5351eb1fc..9dd465e0276f4 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioCompletionActionTests.java @@ -74,7 +74,7 @@ public void shutdown() throws IOException { public void testExecute_ReturnsSuccessfulResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = HttpRequestSenderTests.createSenderWithSingleRequestManager(senderFactory, "test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -206,7 +206,7 @@ public void testExecute_ThrowsException() { public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = HttpRequestSenderTests.createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioEmbeddingsActionTests.java index a55b3c5f5030c..7e98b9b31f6ed 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googleaistudio/GoogleAiStudioEmbeddingsActionTests.java @@ -79,7 +79,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var input = "input"; var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty()); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = senderFactory.createSender()) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index fceea8810f6c2..b3ec565b3146a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -42,6 +42,7 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.hamcrest.Matchers.contains; @@ -75,7 +76,7 @@ public void shutdown() throws IOException { public 
void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -131,7 +132,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx ); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -187,7 +188,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx public void testExecute_ReturnsSuccessfulResponse_ForEmbeddingsAction() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -239,7 +240,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws ); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); // this will fail because the only valid formats are {"embeddings": [[...]]} or [[...]] @@ -292,7 +293,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJsonContentTooLarge = """ @@ -357,7 +358,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc public void testExecute_TruncatesInputBeforeSending() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 496238eaad0e4..b6d7eb673b7f0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -38,6 +38,7 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static 
org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -74,7 +75,7 @@ public void shutdown() throws IOException { public void testCreate_OpenAiEmbeddingsModel() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -127,7 +128,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -179,7 +180,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -238,7 +239,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th ); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -292,7 +293,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th public void testCreate_OpenAiChatCompletionModel() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -355,7 +356,7 @@ public void testCreate_OpenAiChatCompletionModel() throws IOException { public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -417,7 +418,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOExceptio public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -486,7 +487,7 @@ public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat( ); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -552,7 +553,7 @@ public void 
testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat( public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); var contentTooLargeErrorMessage = @@ -635,7 +636,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); var contentTooLargeErrorMessage = @@ -718,7 +719,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC public void testExecute_TruncatesInputBeforeSending() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index 914ff12db259a..42b062667f770 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -43,6 +43,7 @@ import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests.createSender; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -80,7 +81,7 @@ public void shutdown() throws IOException { public void testExecute_ReturnsSuccessfulResponse() throws IOException { var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty()); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -234,7 +235,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java
index 15b7417912ef5..03c0b4d146b2e 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java
@@ -79,7 +79,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException {
             mockClusterServiceEmpty()
         );
 
-        try (var sender = senderFactory.createSender("test_service")) {
+        try (var sender = senderFactory.createSender()) {
             sender.start();
 
             String responseJson = """
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java
index d82ed47441442..d61a3cbde48c5 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java
@@ -22,7 +22,7 @@
 
 import static org.elasticsearch.core.Strings.format;
 
-public class Utils {
+public final class Utils {
 
     public static String getUrl(MockWebServer webServer) {
         return format("http://%s:%s", webServer.getHostName(), webServer.getPort());
@@ -46,4 +46,6 @@ public static Map entityAsMap(InputStream body) throws IOExcepti
             return parser.map();
         }
     }
+
+    private Utils() {}
 }
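The new BaseRequestManagerTests file that follows pins down the rateLimitGrouping() contract: grouping must be value-based (two managers built from equal values share a group), and it must incorporate the RateLimitSettings, so a different requests-per-minute value or time unit lands in a different group. Here is a hedged sketch of the idea, assuming RateLimitSettings implements value equality (which these tests imply) and the import from the inference plugin; the record is illustrative, not the real BaseRequestManager internals:

import java.util.concurrent.TimeUnit;
import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;

// Illustrative only: a value-based composite key behaves like the grouping the
// tests below assert; equal components compare equal across object references.
public class RateLimitGroupingSketch {
    record Grouping(Object groupingValue, RateLimitSettings settings) {}

    public static void main(String[] args) {
        var sameA = new Grouping(1, new RateLimitSettings(1));
        var sameB = new Grouping(1, new RateLimitSettings(1));
        var otherUnit = new Grouping(1, new RateLimitSettings(1, TimeUnit.DAYS));

        System.out.println(sameA.equals(sameB));     // true: different references, same values
        System.out.println(sameA.equals(otherUnit)); // false: different rate limit settings
    }
}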
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManagerTests.java
new file mode 100644
index 0000000000000..03838896b879d
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManagerTests.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.http.sender;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.inference.external.http.retry.RequestSender;
+import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.mockito.Mockito.mock;
+
+public class BaseRequestManagerTests extends ESTestCase {
+    public void testRateLimitGrouping_DifferentObjectReferences_HaveSameGroup() {
+        int val1 = 1;
+        int val2 = 1;
+
+        var manager1 = new BaseRequestManager(mock(ThreadPool.class), "id", val1, new RateLimitSettings(1)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        var manager2 = new BaseRequestManager(mock(ThreadPool.class), "id", val2, new RateLimitSettings(1)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        assertThat(manager1.rateLimitGrouping(), is(manager2.rateLimitGrouping()));
+    }
+
+    public void testRateLimitGrouping_DifferentSettings_HaveDifferentGroup() {
+        int val1 = 1;
+
+        var manager1 = new BaseRequestManager(mock(ThreadPool.class), "id", val1, new RateLimitSettings(1)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        var manager2 = new BaseRequestManager(mock(ThreadPool.class), "id", val1, new RateLimitSettings(2)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        assertThat(manager1.rateLimitGrouping(), not(manager2.rateLimitGrouping()));
+    }
+
+    public void testRateLimitGrouping_DifferentSettingsTimeUnit_HaveDifferentGroup() {
+        int val1 = 1;
+
+        var manager1 = new BaseRequestManager(mock(ThreadPool.class), "id", val1, new RateLimitSettings(1, TimeUnit.MILLISECONDS)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        var manager2 = new BaseRequestManager(mock(ThreadPool.class), "id", val1, new RateLimitSettings(1, TimeUnit.DAYS)) {
+            @Override
+            public void execute(
+                String query,
+                List<String> input,
+                RequestSender requestSender,
+                Supplier<Boolean> hasRequestCompletedFunction,
+                ActionListener<InferenceServiceResults> listener
+            ) {
+
+            }
+        };
+
+        assertThat(manager1.rateLimitGrouping(), not(manager2.rateLimitGrouping()));
+    }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java
index 368745b310884..2b8b5f178b3de 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java
+++
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -79,7 +79,7 @@ public void shutdown() throws IOException, InterruptedException { public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception { var senderFactory = createSenderFactory(clientManager, threadRef); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = createSender(senderFactory)) { sender.start(); String responseJson = """ @@ -135,11 +135,11 @@ public void testHttpRequestSender_Throws_WhenCallingSendBeforeStart() throws Exc mockClusterServiceEmpty() ); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = senderFactory.createSender()) { PlainActionFuture listener = new PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, - () -> sender.send(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener) + () -> sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener) ); assertThat(thrownException.getMessage(), is("call start() before sending a request")); } @@ -155,17 +155,12 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { mockClusterServiceEmpty() ); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = senderFactory.createSender()) { assertThat(sender, instanceOf(HttpRequestSender.class)); sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send( - ExecutableRequestCreatorTests.createMock(), - new DocumentsOnlyInput(List.of()), - TimeValue.timeValueNanos(1), - listener - ); + sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -186,16 +181,11 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws mockClusterServiceEmpty() ); - try (var sender = senderFactory.createSender("test_service")) { + try (var sender = senderFactory.createSender()) { sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send( - ExecutableRequestCreatorTests.createMock(), - new DocumentsOnlyInput(List.of()), - TimeValue.timeValueNanos(1), - listener - ); + sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -220,6 +210,7 @@ private static HttpRequestSender.Factory createSenderFactory(HttpClientManager c when(mockThreadPool.executor(anyString())).thenReturn(mockExecutorService); when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(mockThreadPool.schedule(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.ScheduledCancellable.class)); + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); return new HttpRequestSender.Factory( ServiceComponentsTests.createWithEmptySettings(mockThreadPool), @@ -248,7 +239,7 @@ public static HttpRequestSender.Factory createSenderFactory( ); } - public static Sender createSenderWithSingleRequestManager(HttpRequestSender.Factory factory, String serviceName) { - return factory.createSender(serviceName); + public static Sender 
createSender(HttpRequestSender.Factory factory) { + return factory.createSender(); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java index c0c0bdd49f617..489b502c04110 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import static org.elasticsearch.xpack.inference.Utils.mockClusterService; @@ -18,12 +19,23 @@ public static RequestExecutorServiceSettings createRequestExecutorServiceSetting } public static RequestExecutorServiceSettings createRequestExecutorServiceSettings(@Nullable Integer queueCapacity) { + return createRequestExecutorServiceSettings(queueCapacity, null); + } + + public static RequestExecutorServiceSettings createRequestExecutorServiceSettings( + @Nullable Integer queueCapacity, + @Nullable TimeValue staleDuration + ) { var settingsBuilder = Settings.builder(); if (queueCapacity != null) { settingsBuilder.put(RequestExecutorServiceSettings.TASK_QUEUE_CAPACITY_SETTING.getKey(), queueCapacity); } + if (staleDuration != null) { + settingsBuilder.put(RequestExecutorServiceSettings.RATE_LIMIT_GROUP_STALE_DURATION_SETTING.getKey(), staleDuration); + } + return createRequestExecutorServiceSettings(settingsBuilder.build()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index ff88ba221d985..9a45e10007643 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -18,13 +18,19 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.common.RateLimiter; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import java.io.IOException; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; @@ -42,10 +48,13 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; 
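Before the RequestExecutorServiceTests changes below, note the new knob introduced in the settings-test helper above: RequestExecutorServiceSettings now exposes RATE_LIMIT_GROUP_STALE_DURATION_SETTING, which appears to bound how long an idle rate-limit group may linger before the executor prunes it. A hedged sketch of wiring it up; the 15-minute value is invented, and only the setting constant and the two-argument helper overload come from this diff:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue;

// Hedged sketch: configure the stale duration via cluster settings.
Settings settings = Settings.builder()
    .put(RequestExecutorServiceSettings.RATE_LIMIT_GROUP_STALE_DURATION_SETTING.getKey(), TimeValue.timeValueMinutes(15))
    .build();

// Equivalent, via the test helper added above (queue capacity left unset):
var executorSettings = RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettings(null, TimeValue.timeValueMinutes(15));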
+import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; public class RequestExecutorServiceTests extends ESTestCase { @@ -70,7 +79,7 @@ public void testQueueSize_IsEmpty() { public void testQueueSize_IsOne() { var service = createRequestExecutorServiceWithMocks(); - service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); + service.execute(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); } @@ -92,7 +101,20 @@ public void testIsTerminated_IsTrue() throws InterruptedException { assertTrue(service.isTerminated()); } - public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { + public void testCallingStartTwice_ThrowsAssertionException() throws InterruptedException { + var latch = new CountDownLatch(1); + var service = createRequestExecutorService(latch, mock(RetryingHttpSender.class)); + + service.shutdown(); + service.start(); + latch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + + assertTrue(service.isTerminated()); + var exception = expectThrows(AssertionError.class, service::start); + assertThat(exception.getMessage(), is("start() can only be called once")); + } + + public void testIsTerminated_AfterStopFromSeparateThread() { var waitToShutdown = new CountDownLatch(1); var waitToReturnFromSend = new CountDownLatch(1); @@ -127,41 +149,48 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { assertTrue(service.isTerminated()); } - public void testSend_AfterShutdown_Throws() { + public void testExecute_AfterShutdown_Throws() { var service = createRequestExecutorServiceWithMocks(); service.shutdown(); + var requestManager = RequestManagerTests.createMock("id"); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), - is("Failed to enqueue task because the http executor service [test_service] has already shutdown") + is( + Strings.format( + "Failed to enqueue task for inference id [id] because the request service [%s] has already shutdown", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); assertTrue(thrownException.isExecutorShutdown()); } - public void testSend_Throws_WhenQueueIsFull() { - var service = new RequestExecutorService( - "test_service", - threadPool, - null, - createRequestExecutorServiceSettings(1), - new SingleRequestManager(mock(RetryingHttpSender.class)) - ); + public void testExecute_Throws_WhenQueueIsFull() { + var service = new RequestExecutorService(threadPool, null, createRequestExecutorServiceSettings(1), mock(RetryingHttpSender.class)); - service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); + service.execute(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); + + var requestManager = RequestManagerTests.createMock("id"); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); 
var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), - is("Failed to execute task because the http executor service [test_service] queue is full") + is( + Strings.format( + "Failed to execute task for inference id [id] because the request service [%s] queue is full", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); assertFalse(thrownException.isExecutorShutdown()); } @@ -203,16 +232,11 @@ public void testShutdown_AllowsMultipleCalls() { assertTrue(service.isShutdown()); } - public void testSend_CallsOnFailure_WhenRequestTimesOut() { + public void testExecute_CallsOnFailure_WhenRequestTimesOut() { var service = createRequestExecutorServiceWithMocks(); var listener = new PlainActionFuture(); - service.execute( - ExecutableRequestCreatorTests.createMock(), - new DocumentsOnlyInput(List.of()), - TimeValue.timeValueNanos(1), - listener - ); + service.execute(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -222,7 +246,7 @@ public void testSend_CallsOnFailure_WhenRequestTimesOut() { ); } - public void testSend_PreservesThreadContext() throws InterruptedException, ExecutionException, TimeoutException { + public void testExecute_PreservesThreadContext() throws InterruptedException, ExecutionException, TimeoutException { var headerKey = "not empty"; var headerValue = "value"; @@ -270,7 +294,7 @@ public void onFailure(Exception e) { } }; - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); + service.execute(RequestManagerTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); @@ -280,11 +304,12 @@ public void onFailure(Exception e) { finishedOnResponse.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); } - public void testSend_NotifiesTasksOfShutdown() { + public void testExecute_NotifiesTasksOfShutdown() { var service = createRequestExecutorServiceWithMocks(); + var requestManager = RequestManagerTests.createMock(mock(RequestSender.class), "id"); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); service.shutdown(); service.start(); @@ -293,47 +318,62 @@ public void testSend_NotifiesTasksOfShutdown() { assertThat( thrownException.getMessage(), - is("Failed to send request, queue service [test_service] has shutdown prior to executing request") + is( + Strings.format( + "Failed to send request, request service [%s] for inference id [id] has shutdown prior to executing request", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); assertTrue(thrownException.isExecutorShutdown()); assertTrue(service.isTerminated()); } - public void testQueueTake_DoesNotCauseServiceToTerminate_WhenItThrows() throws InterruptedException { + public void testQueuePoll_DoesNotCauseServiceToTerminate_WhenItThrows() throws InterruptedException { @SuppressWarnings("unchecked") BlockingQueue queue = mock(LinkedBlockingQueue.class); + var requestSender = mock(RetryingHttpSender.class); + var service = new RequestExecutorService( - getTestName(), threadPool, 
mockQueueCreator(queue), null, createRequestExecutorServiceSettingsEmpty(), - new SingleRequestManager(mock(RetryingHttpSender.class)) + requestSender, + Clock.systemUTC(), + RequestExecutorService.DEFAULT_SLEEPER, + RequestExecutorService.DEFAULT_RATE_LIMIT_CREATOR ); - when(queue.take()).thenThrow(new ElasticsearchException("failed")).thenAnswer(invocation -> { + PlainActionFuture listener = new PlainActionFuture<>(); + var requestManager = RequestManagerTests.createMock(requestSender, "id"); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); + + when(queue.poll()).thenThrow(new ElasticsearchException("failed")).thenAnswer(invocation -> { service.shutdown(); return null; }); service.start(); assertTrue(service.isTerminated()); - verify(queue, times(2)).take(); } - public void testQueueTake_ThrowingInterruptedException_TerminatesService() throws Exception { + public void testSleep_ThrowingInterruptedException_TerminatesService() throws Exception { @SuppressWarnings("unchecked") BlockingQueue queue = mock(LinkedBlockingQueue.class); - when(queue.take()).thenThrow(new InterruptedException("failed")); + var sleeper = mock(RequestExecutorService.Sleeper.class); + doThrow(new InterruptedException("failed")).when(sleeper).sleep(any()); var service = new RequestExecutorService( - getTestName(), threadPool, mockQueueCreator(queue), null, createRequestExecutorServiceSettingsEmpty(), - new SingleRequestManager(mock(RetryingHttpSender.class)) + mock(RetryingHttpSender.class), + Clock.systemUTC(), + sleeper, + RequestExecutorService.DEFAULT_RATE_LIMIT_CREATOR ); Future executorTermination = threadPool.generic().submit(() -> { @@ -347,66 +387,30 @@ public void testQueueTake_ThrowingInterruptedException_TerminatesService() throw executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); assertTrue(service.isTerminated()); - verify(queue, times(1)).take(); - } - - public void testQueueTake_RejectsTask_WhenServiceShutsDown() throws Exception { - var mockTask = mock(RejectableTask.class); - @SuppressWarnings("unchecked") - BlockingQueue queue = mock(LinkedBlockingQueue.class); - - var service = new RequestExecutorService( - "test_service", - threadPool, - mockQueueCreator(queue), - null, - createRequestExecutorServiceSettingsEmpty(), - new SingleRequestManager(mock(RetryingHttpSender.class)) - ); - - doAnswer(invocation -> { - service.shutdown(); - return mockTask; - }).doReturn(new NoopTask()).when(queue).take(); - - service.start(); - - assertTrue(service.isTerminated()); - verify(queue, times(1)).take(); - - ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); - verify(mockTask, times(1)).onRejection(argument.capture()); - assertThat(argument.getValue(), instanceOf(EsRejectedExecutionException.class)); - assertThat( - argument.getValue().getMessage(), - is("Failed to send request, queue service [test_service] has shutdown prior to executing request") - ); - - var rejectionException = (EsRejectedExecutionException) argument.getValue(); - assertTrue(rejectionException.isExecutorShutdown()); } public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, InterruptedException, TimeoutException { var requestSender = mock(RetryingHttpSender.class); var settings = createRequestExecutorServiceSettings(1); - var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); + var service = new RequestExecutorService(threadPool, null, settings, requestSender); - service.execute( 
- ExecutableRequestCreatorTests.createMock(requestSender), - new DocumentsOnlyInput(List.of()), - null, - new PlainActionFuture<>() - ); + service.execute(RequestManagerTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); + var requestManager = RequestManagerTests.createMock(requestSender, "id"); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), - is("Failed to execute task because the http executor service [test_service] queue is full") + is( + Strings.format( + "Failed to execute task for inference id [id] because the request service [%s] queue is full", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); settings.setQueueCapacity(2); @@ -426,7 +430,7 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); assertTrue(service.isTerminated()); - assertThat(service.remainingQueueCapacity(), is(2)); + assertThat(service.remainingQueueCapacity(requestManager), is(2)); } public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull() throws ExecutionException, InterruptedException, @@ -434,23 +438,24 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( var requestSender = mock(RetryingHttpSender.class); var settings = createRequestExecutorServiceSettings(3); - var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); + var service = new RequestExecutorService(threadPool, null, settings, requestSender); service.execute( - ExecutableRequestCreatorTests.createMock(requestSender), + RequestManagerTests.createMock(requestSender, "id"), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>() ); service.execute( - ExecutableRequestCreatorTests.createMock(requestSender), + RequestManagerTests.createMock(requestSender, "id"), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>() ); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); + var requestManager = RequestManagerTests.createMock(requestSender, "id"); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); assertThat(service.queueSize(), is(3)); settings.setQueueCapacity(1); @@ -470,7 +475,7 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); assertTrue(service.isTerminated()); - assertThat(service.remainingQueueCapacity(), is(1)); + assertThat(service.remainingQueueCapacity(requestManager), is(1)); assertThat(service.queueSize(), is(0)); var thrownException = expectThrows( @@ -479,7 +484,12 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( ); assertThat( thrownException.getMessage(), - is("Failed to send request, queue service [test_service] has shutdown prior to executing request") + is( + Strings.format( + "Failed to send request, request service [%s] for inference id 
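The assertion above pins the new rejection message, which is keyed by the rate-limit grouping's hash rather than a fixed service name. An illustrative helper showing how such a message could be assembled; the production call site is not part of this diff:

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

    static EsRejectedExecutionException queueFullException(String inferenceId, Object rateLimitGrouping) {
        return new EsRejectedExecutionException(
            Strings.format(
                "Failed to execute task for inference id [%s] because the request service [%s] queue is full",
                inferenceId,
                rateLimitGrouping.hashCode()
            ),
            false // the queue is full, but the executor itself is still running
        );
    }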
[id] has shutdown prior to executing request", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); assertTrue(thrownException.isExecutorShutdown()); } @@ -489,23 +499,24 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO var requestSender = mock(RetryingHttpSender.class); var settings = createRequestExecutorServiceSettings(1); - var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); + var service = new RequestExecutorService(threadPool, null, settings, requestSender); + var requestManager = RequestManagerTests.createMock(requestSender); - service.execute( - ExecutableRequestCreatorTests.createMock(requestSender), - new DocumentsOnlyInput(List.of()), - null, - new PlainActionFuture<>() - ); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); + service.execute(RequestManagerTests.createMock(requestSender, "id"), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), - is("Failed to execute task because the http executor service [test_service] queue is full") + is( + Strings.format( + "Failed to execute task for inference id [id] because the request service [%s] queue is full", + requestManager.rateLimitGrouping().hashCode() + ) + ) ); settings.setQueueCapacity(0); @@ -525,7 +536,133 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); assertTrue(service.isTerminated()); - assertThat(service.remainingQueueCapacity(), is(Integer.MAX_VALUE)); + assertThat(service.remainingQueueCapacity(requestManager), is(Integer.MAX_VALUE)); + } + + public void testDoesNotExecuteTask_WhenCannotReserveTokens() { + var mockRateLimiter = mock(RateLimiter.class); + RequestExecutorService.RateLimiterCreator rateLimiterCreator = (a, b, c) -> mockRateLimiter; + + var requestSender = mock(RetryingHttpSender.class); + var settings = createRequestExecutorServiceSettings(1); + var service = new RequestExecutorService( + threadPool, + RequestExecutorService.DEFAULT_QUEUE_CREATOR, + null, + settings, + requestSender, + Clock.systemUTC(), + RequestExecutorService.DEFAULT_SLEEPER, + rateLimiterCreator + ); + var requestManager = RequestManagerTests.createMock(requestSender); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); + + doAnswer(invocation -> { + service.shutdown(); + return TimeValue.timeValueDays(1); + }).when(mockRateLimiter).timeToReserve(anyInt()); + + service.start(); + + verifyNoInteractions(requestSender); + } + + public void testDoesNotExecuteTask_WhenCannotReserveTokens_AndThenCanReserve_AndExecutesTask() { + var mockRateLimiter = mock(RateLimiter.class); + when(mockRateLimiter.reserve(anyInt())).thenReturn(TimeValue.timeValueDays(0)); + + RequestExecutorService.RateLimiterCreator rateLimiterCreator = (a, b, c) -> mockRateLimiter; + + var requestSender = mock(RetryingHttpSender.class); + var settings = createRequestExecutorServiceSettings(1); + var service = new RequestExecutorService( + 
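testDoesNotExecuteTask_WhenCannotReserveTokens below stubs timeToReserve to a full day so send(...) is never reached, while the follow-up test lets the second poll through. A sketch of the control flow the pair implies; the production loop is an assumption:

    // Assumed per-task gating: execute only when a token is immediately
    // available, otherwise sleep for the wait the limiter suggests.
    private void maybeExecute(RateLimiter rateLimiter, RequestExecutorService.Sleeper sleeper, Runnable task)
        throws InterruptedException {
        TimeValue wait = rateLimiter.timeToReserve(1);
        if (wait.nanos() > 0) {
            sleeper.sleep(wait);   // first test: the request sender never sees the task
            return;
        }
        rateLimiter.reserve(1);    // second test: a token is available on the retry
        task.run();                // wraps the RetryingHttpSender.send(...) call
    }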
threadPool, + RequestExecutorService.DEFAULT_QUEUE_CREATOR, + null, + settings, + requestSender, + Clock.systemUTC(), + RequestExecutorService.DEFAULT_SLEEPER, + rateLimiterCreator + ); + var requestManager = RequestManagerTests.createMock(requestSender); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); + + when(mockRateLimiter.timeToReserve(anyInt())).thenReturn(TimeValue.timeValueDays(1)).thenReturn(TimeValue.timeValueDays(0)); + + doAnswer(invocation -> { + service.shutdown(); + return Void.TYPE; + }).when(requestSender).send(any(), any(), any(), any(), any(), any()); + + service.start(); + + verify(requestSender, times(1)).send(any(), any(), any(), any(), any(), any()); + } + + public void testRemovesRateLimitGroup_AfterStaleDuration() { + var now = Instant.now(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var requestSender = mock(RetryingHttpSender.class); + var settings = createRequestExecutorServiceSettings(2, TimeValue.timeValueDays(1)); + var service = new RequestExecutorService( + threadPool, + RequestExecutorService.DEFAULT_QUEUE_CREATOR, + null, + settings, + requestSender, + clock, + RequestExecutorService.DEFAULT_SLEEPER, + RequestExecutorService.DEFAULT_RATE_LIMIT_CREATOR + ); + var requestManager = RequestManagerTests.createMock(requestSender, "id1"); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(requestManager, new DocumentsOnlyInput(List.of()), null, listener); + + assertThat(service.numberOfRateLimitGroups(), is(1)); + // the time is moved to after the stale duration, so now we should remove this grouping + when(clock.instant()).thenReturn(now.plus(Duration.ofDays(2))); + service.removeStaleGroupings(); + assertThat(service.numberOfRateLimitGroups(), is(0)); + + var requestManager2 = RequestManagerTests.createMock(requestSender, "id2"); + service.execute(requestManager2, new DocumentsOnlyInput(List.of()), null, listener); + + assertThat(service.numberOfRateLimitGroups(), is(1)); + } + + public void testStartsCleanupThread() { + var mockThreadPool = mock(ThreadPool.class); + + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); + + var requestSender = mock(RetryingHttpSender.class); + var settings = createRequestExecutorServiceSettings(2, TimeValue.timeValueDays(1)); + var service = new RequestExecutorService( + mockThreadPool, + RequestExecutorService.DEFAULT_QUEUE_CREATOR, + null, + settings, + requestSender, + Clock.systemUTC(), + RequestExecutorService.DEFAULT_SLEEPER, + RequestExecutorService.DEFAULT_RATE_LIMIT_CREATOR + ); + + service.shutdown(); + service.start(); + + ArgumentCaptor argument = ArgumentCaptor.forClass(TimeValue.class); + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), argument.capture(), any()); + assertThat(argument.getValue(), is(TimeValue.timeValueDays(1))); } private Future submitShutdownRequest( @@ -552,12 +689,6 @@ private RequestExecutorService createRequestExecutorServiceWithMocks() { } private RequestExecutorService createRequestExecutorService(@Nullable CountDownLatch startupLatch, RetryingHttpSender requestSender) { - return new RequestExecutorService( - "test_service", - threadPool, - startupLatch, - createRequestExecutorServiceSettingsEmpty(), - new SingleRequestManager(requestSender) - ); + return new RequestExecutorService(threadPool, startupLatch, 
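testRemovesRateLimitGroup_AfterStaleDuration and testStartsCleanupThread above fix two behaviours: groupings unused for longer than the stale duration are dropped, and the sweep runs on a task registered via threadPool.scheduleWithFixedDelay with a delay equal to that stale duration. A sketch of the bookkeeping, with the map and field names assumed:

    import java.time.Clock;
    import java.time.Duration;
    import java.time.Instant;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class RateLimitGroupings {
        private final Map<Object, Instant> lastUsed = new ConcurrentHashMap<>();
        private final Clock clock;
        private final Duration staleDuration;

        RateLimitGroupings(Clock clock, Duration staleDuration) {
            this.clock = clock;
            this.staleDuration = staleDuration;
        }

        void touch(Object grouping) {
            lastUsed.put(grouping, clock.instant());
        }

        // Drop any grouping not used within the stale duration; with the mocked
        // clock advanced by two days and a one-day stale duration, "id1" is removed.
        void removeStaleGroupings() {
            Instant cutoff = clock.instant().minus(staleDuration);
            lastUsed.entrySet().removeIf(e -> e.getValue().isBefore(cutoff));
        }
    }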
createRequestExecutorServiceSettingsEmpty(), requestSender); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManagerTests.java similarity index 56% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManagerTests.java index 31297ed432ef5..291de740aca34 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManagerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.request.RequestTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; @@ -21,34 +22,47 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class ExecutableRequestCreatorTests { +public class RequestManagerTests { public static RequestManager createMock() { - var mockCreator = mock(RequestManager.class); - when(mockCreator.create(any(), anyList(), any(), any(), any(), any())).thenReturn(() -> {}); + return createMock(mock(RequestSender.class)); + } - return mockCreator; + public static RequestManager createMock(String inferenceEntityId) { + return createMock(mock(RequestSender.class), inferenceEntityId); } public static RequestManager createMock(RequestSender requestSender) { - return createMock(requestSender, "id"); + return createMock(requestSender, "id", new RateLimitSettings(1)); + } + + public static RequestManager createMock(RequestSender requestSender, String inferenceEntityId) { + return createMock(requestSender, inferenceEntityId, new RateLimitSettings(1)); } - public static RequestManager createMock(RequestSender requestSender, String modelId) { - var mockCreator = mock(RequestManager.class); + public static RequestManager createMock(RequestSender requestSender, String inferenceEntityId, RateLimitSettings settings) { + var mockManager = mock(RequestManager.class); doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[5]; - return (Runnable) () -> requestSender.send( + ActionListener listener = (ActionListener) invocation.getArguments()[4]; + requestSender.send( mock(Logger.class), - RequestTests.mockRequest(modelId), + RequestTests.mockRequest(inferenceEntityId), HttpClientContext.create(), () -> false, mock(ResponseHandler.class), listener ); - }).when(mockCreator).create(any(), anyList(), any(), any(), any(), any()); - return mockCreator; + return Void.TYPE; + }).when(mockManager).execute(any(), anyList(), any(), any(), any()); + + // just return something consistent so the hashing works + when(mockManager.rateLimitGrouping()).thenReturn(inferenceEntityId); + + when(mockManager.rateLimitSettings()).thenReturn(settings); + when(mockManager.inferenceEntityId()).thenReturn(inferenceEntityId); + + return mockManager; } } diff 
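The reworked helper above gives every mock a stable rateLimitGrouping (the inference entity id), explicit RateLimitSettings, and an execute(...) that forwards to the supplied RequestSender. Usage sketch showing why the consistent grouping matters for the hash-keyed queues and error messages; the assertions are illustrative:

    var sender = mock(RetryingHttpSender.class);
    var managerA = RequestManagerTests.createMock(sender, "id1");
    var managerB = RequestManagerTests.createMock(sender, "id1");
    var managerC = RequestManagerTests.createMock(sender, "id2");

    // Same inference id => same grouping, so both managers share one rate limit.
    assertEquals(managerA.rateLimitGrouping(), managerB.rateLimitGrouping());
    assertNotEquals(managerA.rateLimitGrouping(), managerC.rateLimitGrouping());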
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java deleted file mode 100644 index 55965bc2354d3..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.http.sender; - -import org.apache.http.client.protocol.HttpClientContext; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verifyNoInteractions; -import static org.mockito.Mockito.when; - -public class SingleRequestManagerTests extends ESTestCase { - public void testExecute_DoesNotCallRequestCreatorCreate_WhenInputIsNull() { - var requestCreator = mock(RequestManager.class); - var request = mock(InferenceRequest.class); - when(request.getRequestCreator()).thenReturn(requestCreator); - - new SingleRequestManager(mock(RetryingHttpSender.class)).execute(mock(InferenceRequest.class), HttpClientContext.create()); - verifyNoInteractions(requestCreator); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 897c648eb942f..e1d786819a536 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.external.response; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentEOFException; import org.elasticsearch.xcontent.XContentParser; @@ -16,6 +17,7 @@ import java.util.Locale; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class XContentUtilsTests extends ESTestCase { @@ -233,4 +235,50 @@ public void testConsumeUntilObjectEnd_InArray() throws IOException { assertNull(parser.nextToken()); // fully parsed } } + + public void testParseFloat_SingleFloatValue() throws IOException { + var json = """ + { + "key": 1.23 + } + """; + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "key", errorFormat); + Float value = XContentUtils.parseFloat(parser); + + assertThat(value, equalTo(1.23F)); + } + } + + public void testParseFloat_SingleIntValue() throws IOException { + var json = """ + { + "key": 1 + } + """; + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "key", errorFormat); + Float value = XContentUtils.parseFloat(parser); + + assertThat(value, equalTo(1.0F)); + } 
+ } + + public void testParseFloat_ThrowsIfNotANumber() throws IOException { + var json = """ + { + "key": "value" + } + """; + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "key", errorFormat); + expectThrows(ParsingException.class, () -> XContentUtils.parseFloat(parser)); + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java index 41768a6814f36..c2f93554c6b20 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureaistudio/AzureAiStudioEmbeddingsResponseEntityTests.java @@ -9,7 +9,7 @@ import org.apache.http.HttpResponse; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -50,11 +50,14 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { var entity = new AzureAiStudioEmbeddingsResponseEntity(); - var parsedResults = (TextEmbeddingResults) entity.apply( + var parsedResults = (InferenceTextEmbeddingFloatResults) entity.apply( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(TextEmbeddingResults.Embedding.of(List.of(0.014539449F, -0.015288644F))))); + assertThat( + parsedResults.embeddings(), + is(List.of(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(0.014539449F, -0.015288644F)))) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java index d809635aa4f38..691064b947e23 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntityTests.java @@ -10,8 +10,8 @@ import org.apache.http.HttpResponse; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; import org.hamcrest.MatcherAssert; @@ -55,10 +55,10 @@ public void testFromResponse_CreatesResultsForASingleItem() 
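The three parseFloat tests above pin down the contract: floats parse as-is, integers widen (1 becomes 1.0F), and non-numeric tokens fail. A minimal body consistent with all three; the production XContentUtils.parseFloat is not shown in this diff, so treat this as an assumption:

    import java.io.IOException;

    import org.elasticsearch.common.ParsingException;
    import org.elasticsearch.xcontent.XContentParser;

    static Float parseFloat(XContentParser parser) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
            return parser.floatValue(); // both 1 and 1.23 arrive as VALUE_NUMBER
        }
        // "key": "value" lands here in the throwing test
        throw new ParsingException(parser.getTokenLocation(), "Expected a number but found [{}]", parser.currentToken());
    }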
throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - MatcherAssert.assertThat(parsedResults, instanceOf(TextEmbeddingResults.class)); + MatcherAssert.assertThat(parsedResults, instanceOf(InferenceTextEmbeddingFloatResults.class)); MatcherAssert.assertThat( - ((TextEmbeddingResults) parsedResults).embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + ((InferenceTextEmbeddingFloatResults) parsedResults).embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -89,14 +89,14 @@ public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -133,14 +133,14 @@ public void testFromResponse_UsesTheFirstValidEmbeddingsEntry() throws IOExcepti } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }))) ); } @@ -177,14 +177,14 @@ public void testFromResponse_UsesTheFirstValidEmbeddingsEntryInt8_WithInvalidFir } """; - TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingByteResults parsedResults = (InferenceTextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) -1, (byte) 0 }))) + is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))) ); } @@ -215,14 +215,14 @@ public void testFromResponse_ParsesBytes() throws IOException { } """; - TextEmbeddingByteResults parsedResults = (TextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingByteResults parsedResults = (InferenceTextEmbeddingByteResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); MatcherAssert.assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) -1, (byte) 0 }))) + is(List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new 
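The renames in these response-entity tests are mechanical, but the two result families are easy to conflate. A construction sketch of both, using only APIs exercised in the surrounding assertions:

    import java.util.List;

    import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults;
    import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;

    // Float embeddings: built from a List<Float> or directly from a float[].
    var floatResults = new InferenceTextEmbeddingFloatResults(
        List.of(
            InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(0.014539449F, -0.015288644F)),
            new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.123F, 0.123F })
        )
    );

    // Byte embeddings: what Cohere's signed int8 embedding type parses into.
    var byteResults = new InferenceTextEmbeddingByteResults(
        List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1, (byte) 0 }))
    );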
byte[] { (byte) -1, (byte) 0 }))) ); } @@ -255,7 +255,7 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -264,8 +264,8 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }), - new TextEmbeddingResults.Embedding(new float[] { -0.123F, 0.123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.123F, 0.123F }) ) ) ); @@ -302,7 +302,7 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw } """; - TextEmbeddingResults parsedResults = (TextEmbeddingResults) CohereEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = (InferenceTextEmbeddingFloatResults) CohereEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -311,8 +311,8 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.0018434525F, 0.01777649F }), - new TextEmbeddingResults.Embedding(new float[] { -0.123F, 0.123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.0018434525F, 0.01777649F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.123F, 0.123F }) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java index 5d5096d0b1b51..170395e8af919 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googleaistudio/GoogleAiStudioEmbeddingsResponseEntityTests.java @@ -9,7 +9,7 @@ import org.apache.http.HttpResponse; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -36,12 +36,15 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { } """; - TextEmbeddingResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), 
is(List.of(TextEmbeddingResults.Embedding.of(List.of(-0.00606332F, 0.058092743F))))); + assertThat( + parsedResults.embeddings(), + is(List.of(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(-0.00606332F, 0.058092743F)))) + ); } public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { @@ -64,7 +67,7 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException } """; - TextEmbeddingResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = GoogleAiStudioEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -73,8 +76,8 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException parsedResults.embeddings(), is( List.of( - TextEmbeddingResults.Embedding.of(List.of(-0.00606332F, 0.058092743F)), - TextEmbeddingResults.Embedding.of(List.of(0.030681048F, 0.01714732F)) + InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(-0.00606332F, 0.058092743F)), + InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(List.of(0.030681048F, 0.01714732F)) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java index 238dab5929139..6f06a32f19a68 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java @@ -10,7 +10,7 @@ import org.apache.http.HttpResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -32,14 +32,14 @@ public void testFromResponse_CreatesResultsForASingleItem_ArrayFormat() throws I ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -55,14 +55,14 @@ public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 
0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -80,7 +80,7 @@ public void testFromResponse_CreatesResultsForMultipleItems_ArrayFormat() throws ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -89,8 +89,8 @@ public void testFromResponse_CreatesResultsForMultipleItems_ArrayFormat() throws parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -112,7 +112,7 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -121,8 +121,8 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -255,12 +255,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ArrayFormat() throw ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() throws IOException { @@ -274,12 +277,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() thro } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() throws 
IOException { @@ -291,12 +297,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() thro ] """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() throws IOException { @@ -310,12 +319,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() thr } """; - TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_FailsWhenEmbeddingValueIsAnObject_ObjectFormat() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 6c38092f509a7..8f5bd95126fb7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -10,7 +10,7 @@ import org.apache.http.HttpResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.request.Request; @@ -44,14 +44,14 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.embeddings(), - is(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }))) + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }))) ); } @@ -85,7 +85,7 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults 
parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -94,8 +94,8 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.014539449F, -0.015288644F }), - new TextEmbeddingResults.Embedding(new float[] { 0.0123F, -0.0123F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.014539449F, -0.015288644F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F }) ) ) ); @@ -259,12 +259,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt() throws IOExceptio } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 1.0F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 1.0F }))) + ); } public void testFromResponse_SucceedsWhenEmbeddingValueIsLong() throws IOException { @@ -288,12 +291,15 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong() throws IOExcepti } """; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(new float[] { 4.0294965E10F })))); + assertThat( + parsedResults.embeddings(), + is(List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 4.0294965E10F }))) + ); } public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { @@ -373,7 +379,7 @@ public void testFieldsInDifferentOrderServer() throws IOException { } }"""; - TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + InferenceTextEmbeddingFloatResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( mock(Request.class), new HttpResult(mock(HttpResponse.class), response.getBytes(StandardCharsets.UTF_8)) ); @@ -382,9 +388,9 @@ public void testFieldsInDifferentOrderServer() throws IOException { parsedResults.embeddings(), is( List.of( - new TextEmbeddingResults.Embedding(new float[] { -0.9F, 0.5F, 0.3F }), - new TextEmbeddingResults.Embedding(new float[] { 0.1F, 0.5F }), - new TextEmbeddingResults.Embedding(new float[] { 0.5F, 0.5F }) + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { -0.9F, 0.5F, 0.3F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F, 0.5F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.5F, 0.5F }) ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index efc81606094bd..51fa39b595a8e 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -18,10 +18,11 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; +import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import org.elasticsearch.xpack.inference.model.TestModel; import java.io.IOException; @@ -64,7 +65,7 @@ protected void assertEqualInstances(SemanticTextField expectedInstance, Semantic modelSettings.dimensions(), newInstance.contentType() ); - assertArrayEquals(expectedVector, newVector, 0f); + assertArrayEquals(expectedVector, newVector, 0.0000001f); } case SPARSE_EMBEDDING -> { List expectedTokens = parseWeightedTokens( @@ -85,7 +86,12 @@ protected void assertEqualInstances(SemanticTextField expectedInstance, Semantic @Override protected SemanticTextField createTestInstance() { List rawValues = randomList(1, 5, () -> randomAlphaOfLengthBetween(10, 20)); - return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); + try { // try catch required for override + return randomSemanticText(NAME, TestModel.createRandomInstance(), rawValues, randomFrom(XContentType.values())); + } catch (IOException e) { + fail("Failed to create random SemanticTextField instance"); + } + return null; } @Override @@ -132,33 +138,37 @@ public void testModelSettingsValidation() { assertThat(ex.getMessage(), containsString("required [similarity] field is missing")); } - public static ChunkedTextEmbeddingResults randomTextEmbeddings(Model model, List inputs) { - List chunks = new ArrayList<>(); + public static InferenceChunkedTextEmbeddingFloatResults randomInferenceChunkedTextEmbeddingFloatResults( + Model model, + List inputs + ) throws IOException { + List chunks = new ArrayList<>(); for (String input : inputs) { - double[] values = new double[model.getServiceSettings().dimensions()]; + float[] values = new float[model.getServiceSettings().dimensions()]; for (int j = 0; j < values.length; j++) { - values[j] = randomDouble(); + values[j] = (float) randomDouble(); } - chunks.add(new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk(input, values)); + chunks.add(new InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk(input, values)); } - return new ChunkedTextEmbeddingResults(chunks); + return new InferenceChunkedTextEmbeddingFloatResults(chunks); } - public static ChunkedSparseEmbeddingResults randomSparseEmbeddings(List inputs) { - List chunks = new ArrayList<>(); + public static InferenceChunkedSparseEmbeddingResults randomSparseEmbeddings(List inputs) { + List chunks = new ArrayList<>(); for (String input : inputs) { var tokens = new ArrayList(); 
for (var token : input.split("\\s+")) { tokens.add(new WeightedToken(token, randomFloat())); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(input, tokens)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(input, tokens)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } - public static SemanticTextField randomSemanticText(String fieldName, Model model, List inputs, XContentType contentType) { + public static SemanticTextField randomSemanticText(String fieldName, Model model, List inputs, XContentType contentType) + throws IOException { ChunkedInferenceServiceResults results = switch (model.getTaskType()) { - case TEXT_EMBEDDING -> randomTextEmbeddings(model, inputs); + case TEXT_EMBEDDING -> randomInferenceChunkedTextEmbeddingFloatResults(model, inputs); case SPARSE_EMBEDDING -> randomSparseEmbeddings(inputs); default -> throw new AssertionError("invalid task type: " + model.getTaskType().name()); }; @@ -174,19 +184,18 @@ public static SemanticTextField randomSemanticText(String fieldName, Model model ); } - public static ChunkedInferenceServiceResults toChunkedResult(SemanticTextField field) { + public static ChunkedInferenceServiceResults toChunkedResult(SemanticTextField field) throws IOException { switch (field.inference().modelSettings().taskType()) { case SPARSE_EMBEDDING -> { - List chunks = new ArrayList<>(); + List chunks = new ArrayList<>(); for (var chunk : field.inference().chunks()) { var tokens = parseWeightedTokens(chunk.rawEmbeddings(), field.contentType()); - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(chunk.text(), tokens)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(chunk.text(), tokens)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } case TEXT_EMBEDDING -> { - List chunks = - new ArrayList<>(); + List chunks = new ArrayList<>(); for (var chunk : field.inference().chunks()) { double[] values = parseDenseVector( chunk.rawEmbeddings(), @@ -194,13 +203,13 @@ public static ChunkedInferenceServiceResults toChunkedResult(SemanticTextField f field.contentType() ); chunks.add( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk( + new InferenceChunkedTextEmbeddingFloatResults.InferenceFloatEmbeddingChunk( chunk.text(), - values + FloatConversionUtils.floatArrayOf(values) ) ); } - return new ChunkedTextEmbeddingResults(chunks); + return new InferenceChunkedTextEmbeddingFloatResults(chunks); } default -> throw new AssertionError("Invalid task_type: " + field.inference().modelSettings().taskType().name()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index bc9408bc59dde..07713952e36c3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -44,9 +44,10 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import 
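toChunkedResult above now narrows the parsed double[] to float[] via FloatConversionUtils.floatArrayOf, which is also why assertEqualInstances switched from a 0f delta to 0.0000001f: the round-trip is no longer exact. The conversion itself is presumably just a narrowing loop; a sketch, with the real utility's body an assumption:

    // Narrowing double -> float loses precision, motivating the non-zero epsilon.
    static float[] floatArrayOf(double[] values) {
        float[] floats = new float[values.length];
        for (int i = 0; i < values.length; i++) {
            floats[i] = (float) values[i];
        }
        return floats;
    }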
org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import org.elasticsearch.xpack.inference.InferencePlugin; @@ -258,11 +259,9 @@ private InferenceAction.Response generateSparseEmbeddingInferenceResponse(String private InferenceAction.Response generateTextEmbeddingInferenceResponse() { double[] inference = new double[TEXT_EMBEDDING_DIMENSION_COUNT]; Arrays.fill(inference, 1.0); - TextEmbeddingResults textEmbeddingResults = new TextEmbeddingResults(DEFAULT_RESULTS_FIELD, inference, false); + MlTextEmbeddingResults textEmbeddingResults = new MlTextEmbeddingResults(DEFAULT_RESULTS_FIELD, inference, false); - return new InferenceAction.Response( - org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults.of(List.of(textEmbeddingResults)) - ); + return new InferenceAction.Response(InferenceTextEmbeddingFloatResults.of(List.of(textEmbeddingResults))); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java index 8365ebdfad786..48e5d54a62733 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestInferenceActionTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.junit.Before; import java.util.HashMap; @@ -76,7 +76,9 @@ public void testUses3SecondTimeoutFromParams() { private static InferenceAction.Response createResponse() { return new InferenceAction.Response( - new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) -1 }))) + new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) -1 })) + ) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java deleted file mode 100644 index beb75fbfa36a6..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
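generateTextEmbeddingInferenceResponse above now bridges the ML result type into the inference result type with InferenceTextEmbeddingFloatResults.of(...). A usage sketch; the results-field name here is a placeholder, not the test's actual constant:

    double[] inference = new double[4];
    Arrays.fill(inference, 1.0);
    var mlResults = new MlTextEmbeddingResults("predicted_value" /* placeholder field name */, inference, false);
    var response = new InferenceAction.Response(InferenceTextEmbeddingFloatResults.of(List.of(mlResults)));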
- */ - -package org.elasticsearch.xpack.inference.results; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; - -import java.io.IOException; -import java.util.ArrayList; - -public class ChunkedTextEmbeddingFloatResultsTests extends AbstractWireSerializingTestCase { - - public static ChunkedTextEmbeddingFloatResults createRandomResults() { - int numChunks = randomIntBetween(1, 5); - var chunks = new ArrayList(numChunks); - - for (int i = 0; i < numChunks; i++) { - chunks.add(createRandomChunk()); - } - - return new ChunkedTextEmbeddingFloatResults(chunks); - } - - private static ChunkedTextEmbeddingFloatResults.EmbeddingChunk createRandomChunk() { - int columns = randomIntBetween(1, 10); - float[] floats = new float[columns]; - for (int i = 0; i < columns; i++) { - floats[i] = randomFloat(); - } - - return new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(randomAlphaOfLength(6), floats); - } - - @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingFloatResults::new; - } - - @Override - protected ChunkedTextEmbeddingFloatResults createTestInstance() { - return createRandomResults(); - } - - @Override - protected ChunkedTextEmbeddingFloatResults mutateInstance(ChunkedTextEmbeddingFloatResults instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java deleted file mode 100644 index 1fc0282b5d96d..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.results; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -public class ChunkedTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { - - public static ChunkedTextEmbeddingResults createRandomResults() { - var chunks = new ArrayList(); - int columns = randomIntBetween(5, 10); - int numChunks = randomIntBetween(1, 5); - - for (int i = 0; i < numChunks; i++) { - double[] arr = new double[columns]; - for (int j = 0; j < columns; j++) { - arr[j] = randomDouble(); - } - chunks.add( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk( - randomAlphaOfLength(6), - arr - ) - ); - } - - return new ChunkedTextEmbeddingResults(chunks); - } - - /** - * Similar to {@link ChunkedTextEmbeddingResults#asMap()} but it converts the embeddings double array into a list of doubles to - * make testing equality easier. - */ - public static Map asMapWithListsInsteadOfArrays(ChunkedTextEmbeddingResults result) { - return Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - result.getChunks() - .stream() - .map(org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests::asMapWithListsInsteadOfArrays) - .collect(Collectors.toList()) - ); - } - - public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedTextEmbeddingResults( - List.of( - new org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults.EmbeddingChunk( - "text", - new double[] { 0.1, 0.2 } - ) - ) - ); - - assertThat( - asMapWithListsInsteadOfArrays(entity), - is( - Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, List.of(0.1, 0.2))) - ) - ) - ); - String xContentResult = Strings.toString(entity, true, true); - assertThat(xContentResult, is(""" - { - "text_embedding_chunk" : [ - { - "text" : "text", - "inference" : [ - 0.1, - 0.2 - ] - } - ] - }""")); - } - - public void testToXContent_CreatesTheRightJsonForASingleChunk_FromTextEmbeddingResults() { - var entity = ChunkedTextEmbeddingResults.of( - List.of("text"), - new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1f, 0.2f }))) - ); - - assertThat(entity.size(), is(1)); - - var firstEntry = entity.get(0); - assertThat(firstEntry, instanceOf(ChunkedTextEmbeddingResults.class)); - assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) firstEntry), - is( - Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "text", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.1f, (double) 0.2f) - ) - ) - ) - ) - ); - String xContentResult = Strings.toString(firstEntry, true, true); - assertThat(xContentResult, is(Strings.format(""" - { - "text_embedding_chunk" : [ - { - "text" : "text", - "inference" : [ - %s, - %s - ] - } - ] - }""", 
(double) 0.1f, (double) 0.2f))); - } - - public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { - var exception = expectThrows( - IllegalArgumentException.class, - () -> ChunkedTextEmbeddingResults.of( - List.of("text", "text2"), - new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1f, 0.2f }))) - ) - ); - - assertThat(exception.getMessage(), is("The number of inputs [2] does not match the embeddings [1]")); - } - - @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingResults::new; - } - - @Override - protected ChunkedTextEmbeddingResults createTestInstance() { - return createRandomResults(); - } - - @Override - protected ChunkedTextEmbeddingResults mutateInstance(ChunkedTextEmbeddingResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedTextEmbeddingResultsTests::createRandomResults); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java similarity index 68% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java index 073a662c1e8f2..9a2afdade296a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedSparseEmbeddingResultsTests.java @@ -10,10 +10,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.io.IOException; @@ -23,10 +23,10 @@ import static org.hamcrest.Matchers.is; -public class ChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { +public class InferenceChunkedSparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { - public static ChunkedSparseEmbeddingResults createRandomResults() { - var chunks = new ArrayList(); + public static InferenceChunkedSparseEmbeddingResults createRandomResults() { + var chunks = new ArrayList(); int numChunks = randomIntBetween(1, 5); for (int i = 0; i < numChunks; i++) { @@ -35,22 +35,22 @@ public static ChunkedSparseEmbeddingResults createRandomResults() { for (int j = 0; j < numTokens; j++) { tokenWeights.add(new WeightedToken(Integer.toString(j), (float) randomDoubleBetween(0.0, 5.0, false))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), tokenWeights)); + chunks.add(new InferenceChunkedTextExpansionResults.ChunkedResult(randomAlphaOfLength(6), 
tokenWeights)); } - return new ChunkedSparseEmbeddingResults(chunks); + return new InferenceChunkedSparseEmbeddingResults(chunks); } public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedSparseEmbeddingResults( - List.of(new ChunkedTextExpansionResults.ChunkedResult("text", List.of(new WeightedToken("token", 0.1f)))) + var entity = new InferenceChunkedSparseEmbeddingResults( + List.of(new InferenceChunkedTextExpansionResults.ChunkedResult("text", List.of(new WeightedToken("token", 0.1f)))) ); assertThat( entity.asMap(), is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, Map.of("token", 0.1f))) ) ) @@ -71,7 +71,7 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk() { } public void testToXContent_CreatesTheRightJsonForASingleChunk_FromSparseEmbeddingResults() { - var entity = ChunkedSparseEmbeddingResults.of( + var entity = InferenceChunkedSparseEmbeddingResults.listOf( List.of("text"), new SparseEmbeddingResults(List.of(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken("token", 0.1f)), false))) ); @@ -84,7 +84,7 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk_FromSparseEmbeddin firstEntry.asMap(), is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of(Map.of(ChunkedNlpInferenceResults.TEXT, "text", ChunkedNlpInferenceResults.INFERENCE, Map.of("token", 0.1f))) ) ) @@ -107,7 +107,7 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk_FromSparseEmbeddin public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { var exception = expectThrows( IllegalArgumentException.class, - () -> ChunkedSparseEmbeddingResults.of( + () -> InferenceChunkedSparseEmbeddingResults.listOf( List.of("text", "text2"), new SparseEmbeddingResults(List.of(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken("token", 0.1f)), false))) ) @@ -117,17 +117,17 @@ public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { } @Override - protected Writeable.Reader instanceReader() { - return ChunkedSparseEmbeddingResults::new; + protected Writeable.Reader instanceReader() { + return InferenceChunkedSparseEmbeddingResults::new; } @Override - protected ChunkedSparseEmbeddingResults createTestInstance() { + protected InferenceChunkedSparseEmbeddingResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedSparseEmbeddingResults mutateInstance(ChunkedSparseEmbeddingResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedSparseEmbeddingResultsTests::createRandomResults); + protected InferenceChunkedSparseEmbeddingResults mutateInstance(InferenceChunkedSparseEmbeddingResults instance) throws IOException { + return randomValueOtherThan(instance, InferenceChunkedSparseEmbeddingResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java similarity index 52% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java rename to 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java index 6d6fbe956280a..c1215e8a3d71b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceChunkedTextEmbeddingByteResultsTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import java.io.IOException; import java.util.ArrayList; @@ -20,32 +20,33 @@ import static org.hamcrest.Matchers.is; -public class ChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { +public class InferenceChunkedTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase< + InferenceChunkedTextEmbeddingByteResults> { - public static ChunkedTextEmbeddingByteResults createRandomResults() { + public static InferenceChunkedTextEmbeddingByteResults createRandomResults() { int numChunks = randomIntBetween(1, 5); - var chunks = new ArrayList(numChunks); + var chunks = new ArrayList(numChunks); for (int i = 0; i < numChunks; i++) { chunks.add(createRandomChunk()); } - return new ChunkedTextEmbeddingByteResults(chunks, randomBoolean()); + return new InferenceChunkedTextEmbeddingByteResults(chunks, randomBoolean()); } - private static ChunkedTextEmbeddingByteResults.EmbeddingChunk createRandomChunk() { + private static InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk createRandomChunk() { int columns = randomIntBetween(1, 10); byte[] bytes = new byte[columns]; for (int i = 0; i < columns; i++) { bytes[i] = randomByte(); } - return new ChunkedTextEmbeddingByteResults.EmbeddingChunk(randomAlphaOfLength(6), bytes); + return new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk(randomAlphaOfLength(6), bytes); } public void testToXContent_CreatesTheRightJsonForASingleChunk() { - var entity = new ChunkedTextEmbeddingByteResults( - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })), + var entity = new InferenceChunkedTextEmbeddingByteResults( + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })), false ); @@ -53,8 +54,8 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk() { entity.asMap(), is( Map.of( - ChunkedTextEmbeddingByteResults.FIELD_NAME, - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })) + InferenceChunkedTextEmbeddingByteResults.FIELD_NAME, + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })) ) ) ); @@ -73,9 +74,11 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk() { } public void testToXContent_CreatesTheRightJsonForASingleChunk_ForTextEmbeddingByteResults() { - var entity = ChunkedTextEmbeddingByteResults.of( + var entity = InferenceChunkedTextEmbeddingByteResults.listOf( List.of("text"), - new TextEmbeddingByteResults(List.of(new 
TextEmbeddingByteResults.Embedding(new byte[] { (byte) 1 }))) + new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 1 })) + ) ); assertThat(entity.size(), is(1)); @@ -86,8 +89,8 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk_ForTextEmbeddingBy firstEntry.asMap(), is( Map.of( - ChunkedTextEmbeddingByteResults.FIELD_NAME, - List.of(new ChunkedTextEmbeddingByteResults.EmbeddingChunk("text", new byte[] { (byte) 1 })) + InferenceChunkedTextEmbeddingByteResults.FIELD_NAME, + List.of(new InferenceChunkedTextEmbeddingByteResults.InferenceByteEmbeddingChunk("text", new byte[] { (byte) 1 })) ) ) ); @@ -108,9 +111,11 @@ public void testToXContent_CreatesTheRightJsonForASingleChunk_ForTextEmbeddingBy public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { var exception = expectThrows( IllegalArgumentException.class, - () -> ChunkedTextEmbeddingByteResults.of( + () -> InferenceChunkedTextEmbeddingByteResults.listOf( List.of("text", "text2"), - new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 1 }))) + new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 1 })) + ) ) ); @@ -118,17 +123,18 @@ public void testToXContent_ThrowsWhenInputSizeIsDifferentThanEmbeddings() { } @Override - protected Writeable.Reader instanceReader() { - return ChunkedTextEmbeddingByteResults::new; + protected Writeable.Reader instanceReader() { + return InferenceChunkedTextEmbeddingByteResults::new; } @Override - protected ChunkedTextEmbeddingByteResults createTestInstance() { + protected InferenceChunkedTextEmbeddingByteResults createTestInstance() { return createRandomResults(); } @Override - protected ChunkedTextEmbeddingByteResults mutateInstance(ChunkedTextEmbeddingByteResults instance) throws IOException { - return randomValueOtherThan(instance, ChunkedTextEmbeddingByteResultsTests::createRandomResults); + protected InferenceChunkedTextEmbeddingByteResults mutateInstance(InferenceChunkedTextEmbeddingByteResults instance) + throws IOException { + return randomValueOtherThan(instance, InferenceChunkedTextEmbeddingByteResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java similarity index 51% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java index a15d6323d1315..c6749e9822cf4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/InferenceTextEmbeddingByteResultsTests.java @@ -10,7 +10,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; 
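// The byte-results renames above pair each top-level class with a nested embedding type.
// A minimal usage sketch; constructor shapes are taken from this diff, nothing further is assumed:
var embedding = new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 });
var results = new InferenceTextEmbeddingByteResults(List.of(embedding));
// transformToCoordinationFormat() widens each byte to a double, so the instance above maps to
// new MlTextEmbeddingResults(InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, new double[] { 23F, 24F }, false)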
import java.io.IOException; import java.util.ArrayList; @@ -19,19 +20,19 @@ import static org.hamcrest.Matchers.is; -public class TextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { - public static TextEmbeddingByteResults createRandomResults() { +public class InferenceTextEmbeddingByteResultsTests extends AbstractWireSerializingTestCase { + public static InferenceTextEmbeddingByteResults createRandomResults() { int embeddings = randomIntBetween(1, 10); - List embeddingResults = new ArrayList<>(embeddings); + List embeddingResults = new ArrayList<>(embeddings); for (int i = 0; i < embeddings; i++) { embeddingResults.add(createRandomEmbedding()); } - return new TextEmbeddingByteResults(embeddingResults); + return new InferenceTextEmbeddingByteResults(embeddingResults); } - private static TextEmbeddingByteResults.Embedding createRandomEmbedding() { + private static InferenceTextEmbeddingByteResults.InferenceByteEmbedding createRandomEmbedding() { int columns = randomIntBetween(1, 10); byte[] bytes = new byte[columns]; @@ -39,11 +40,13 @@ private static TextEmbeddingByteResults.Embedding createRandomEmbedding() { bytes[i] = randomByte(); } - return new TextEmbeddingByteResults.Embedding(bytes); + return new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(bytes); } public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { - var entity = new TextEmbeddingByteResults(List.of(new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23 }))); + var entity = new InferenceTextEmbeddingByteResults( + List.of(new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 })) + ); String xContentResult = Strings.toString(entity, true, true); assertThat(xContentResult, is(""" @@ -59,10 +62,10 @@ public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOE } public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { - var entity = new TextEmbeddingByteResults( + var entity = new InferenceTextEmbeddingByteResults( List.of( - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23 }), - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 24 }) + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23 }), + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 24 }) ) ); @@ -85,10 +88,10 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I } public void testTransformToCoordinationFormat() { - var results = new TextEmbeddingByteResults( + var results = new InferenceTextEmbeddingByteResults( List.of( - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }), - new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 }) + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 23, (byte) 24 }), + new InferenceTextEmbeddingByteResults.InferenceByteEmbedding(new byte[] { (byte) 25, (byte) 26 }) ) ).transformToCoordinationFormat(); @@ -96,49 +99,43 @@ public void testTransformToCoordinationFormat() { results, is( List.of( - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - new double[] { 23F, 24F }, - false - ), - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - new double[] { 25F, 26F }, - false - ) + new 
MlTextEmbeddingResults(InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, new double[] { 23F, 24F }, false), + new MlTextEmbeddingResults(InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, new double[] { 25F, 26F }, false) ) ) ); } @Override - protected Writeable.Reader instanceReader() { - return TextEmbeddingByteResults::new; + protected Writeable.Reader instanceReader() { + return InferenceTextEmbeddingByteResults::new; } @Override - protected TextEmbeddingByteResults createTestInstance() { + protected InferenceTextEmbeddingByteResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingByteResults mutateInstance(TextEmbeddingByteResults instance) throws IOException { + protected InferenceTextEmbeddingByteResults mutateInstance(InferenceTextEmbeddingByteResults instance) throws IOException { // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list if (randomBoolean()) { // -1 to remove at least one item from the list int end = randomInt(instance.embeddings().size() - 1); - return new TextEmbeddingByteResults(instance.embeddings().subList(0, end)); + return new InferenceTextEmbeddingByteResults(instance.embeddings().subList(0, end)); } else { - List embeddings = new ArrayList<>(instance.embeddings()); + List embeddings = new ArrayList<>(instance.embeddings()); embeddings.add(createRandomEmbedding()); - return new TextEmbeddingByteResults(embeddings); + return new InferenceTextEmbeddingByteResults(embeddings); } } public static Map buildExpectationByte(List> embeddings) { return Map.of( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - embeddings.stream().map(embedding -> Map.of(TextEmbeddingByteResults.Embedding.EMBEDDING, embedding)).toList() + InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, + embeddings.stream() + .map(embedding -> Map.of(InferenceTextEmbeddingByteResults.InferenceByteEmbedding.EMBEDDING, embedding)) + .toList() ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java similarity index 97% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java index 1fa08231dd6ba..f7ed3f34d364b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyMlTextEmbeddingResultsTests.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; @SuppressWarnings("deprecation") -public class LegacyTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { +public class LegacyMlTextEmbeddingResultsTests extends AbstractWireSerializingTestCase { public static LegacyTextEmbeddingResults createRandomResults() { int embeddings = randomIntBetween(1, 10); List embeddingResults = new ArrayList<>(embeddings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 716568fdb5645..2c405aaeaba3f 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -10,8 +10,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -20,30 +21,32 @@ import static org.hamcrest.Matchers.is; -public class TextEmbeddingResultsTests extends AbstractWireSerializingTestCase { - public static TextEmbeddingResults createRandomResults() { +public class TextEmbeddingResultsTests extends AbstractWireSerializingTestCase { + public static InferenceTextEmbeddingFloatResults createRandomResults() { int embeddings = randomIntBetween(1, 10); - List embeddingResults = new ArrayList<>(embeddings); + List embeddingResults = new ArrayList<>(embeddings); for (int i = 0; i < embeddings; i++) { embeddingResults.add(createRandomEmbedding()); } - return new TextEmbeddingResults(embeddingResults); + return new InferenceTextEmbeddingFloatResults(embeddingResults); } - private static TextEmbeddingResults.Embedding createRandomEmbedding() { + private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding createRandomEmbedding() { int columns = randomIntBetween(1, 10); float[] floats = new float[columns]; for (int i = 0; i < columns; i++) { floats[i] = randomFloat(); } - return new TextEmbeddingResults.Embedding(floats); + return new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(floats); } public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOException { - var entity = new TextEmbeddingResults(List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1F }))); + var entity = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F })) + ); String xContentResult = Strings.toString(entity, true, true); assertThat(xContentResult, is(""" @@ -59,8 +62,11 @@ public void testToXContent_CreatesTheRightFormatForASingleEmbedding() throws IOE } public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws IOException { - var entity = new TextEmbeddingResults( - List.of(new TextEmbeddingResults.Embedding(new float[] { 0.1F }), new TextEmbeddingResults.Embedding(new float[] { 0.2F })) + var entity = new InferenceTextEmbeddingFloatResults( + List.of( + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.2F }) + ) ); @@ -83,10 +89,10 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I } public void testTransformToCoordinationFormat() { - var results = new TextEmbeddingResults( + var results = new InferenceTextEmbeddingFloatResults( List.of( - new TextEmbeddingResults.Embedding(new float[] { 0.1F, 0.2F }), - new TextEmbeddingResults.Embedding(new float[] { 0.3F, 0.4F }) + new 
InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.1F, 0.2F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.3F, 0.4F }) ) ).transformToCoordinationFormat(); @@ -94,53 +100,48 @@ public void testTransformToCoordinationFormat() { results, is( List.of( - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingResults.TEXT_EMBEDDING, - new double[] { 0.1F, 0.2F }, - false - ), - new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( - TextEmbeddingResults.TEXT_EMBEDDING, - new double[] { 0.3F, 0.4F }, - false - ) + new MlTextEmbeddingResults(InferenceTextEmbeddingFloatResults.TEXT_EMBEDDING, new double[] { 0.1F, 0.2F }, false), + new MlTextEmbeddingResults(InferenceTextEmbeddingFloatResults.TEXT_EMBEDDING, new double[] { 0.3F, 0.4F }, false) ) ) ); } @Override - protected Writeable.Reader instanceReader() { - return TextEmbeddingResults::new; + protected Writeable.Reader instanceReader() { + return InferenceTextEmbeddingFloatResults::new; } @Override - protected TextEmbeddingResults createTestInstance() { + protected InferenceTextEmbeddingFloatResults createTestInstance() { return createRandomResults(); } @Override - protected TextEmbeddingResults mutateInstance(TextEmbeddingResults instance) throws IOException { + protected InferenceTextEmbeddingFloatResults mutateInstance(InferenceTextEmbeddingFloatResults instance) throws IOException { // if true we reduce the embeddings list by a random amount, if false we add an embedding to the list if (randomBoolean()) { // -1 to remove at least one item from the list int end = randomInt(instance.embeddings().size() - 1); - return new TextEmbeddingResults(instance.embeddings().subList(0, end)); + return new InferenceTextEmbeddingFloatResults(instance.embeddings().subList(0, end)); } else { - List embeddings = new ArrayList<>(instance.embeddings()); + List embeddings = new ArrayList<>(instance.embeddings()); embeddings.add(createRandomEmbedding()); - return new TextEmbeddingResults(embeddings); + return new InferenceTextEmbeddingFloatResults(embeddings); } } public static Map buildExpectationFloat(List embeddings) { - return Map.of(TextEmbeddingResults.TEXT_EMBEDDING, embeddings.stream().map(TextEmbeddingResults.Embedding::new).toList()); + return Map.of( + InferenceTextEmbeddingFloatResults.TEXT_EMBEDDING, + embeddings.stream().map(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding::new).toList() + ); } public static Map buildExpectationByte(List embeddings) { return Map.of( - TextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, - embeddings.stream().map(TextEmbeddingByteResults.Embedding::new).toList() + InferenceTextEmbeddingByteResults.TEXT_EMBEDDING_BYTES, + embeddings.stream().map(InferenceTextEmbeddingByteResults.InferenceByteEmbedding::new).toList() ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index ee3403492c423..974b31e73b499 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -33,7 +33,6 @@ import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; 
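// The mutateInstance override in the float-results test above follows a grow-or-shrink
// pattern. A sketch with the generic type parameters, which do not survive in this hunk's
// rendering, spelled out (the element type is inferred from the surrounding diff):
@Override
protected InferenceTextEmbeddingFloatResults mutateInstance(InferenceTextEmbeddingFloatResults instance) throws IOException {
    if (randomBoolean()) {
        // shrink: keep a random prefix, removing at least one embedding (hence the -1)
        int end = randomInt(instance.embeddings().size() - 1);
        return new InferenceTextEmbeddingFloatResults(instance.embeddings().subList(0, end));
    } else {
        // grow: append one more random embedding
        List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddings = new ArrayList<>(instance.embeddings());
        embeddings.add(createRandomEmbedding());
        return new InferenceTextEmbeddingFloatResults(embeddings);
    }
}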
-import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -59,7 +58,7 @@ public void testStart_InitializesTheSender() throws IOException { var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); try (var service = new TestSenderService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); @@ -67,7 +66,7 @@ public void testStart_InitializesTheSender() throws IOException { listener.actionGet(TIMEOUT); verify(sender, times(1)).start(); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); } verify(sender, times(1)).close(); @@ -79,7 +78,7 @@ public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOExcep var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); try (var service = new TestSenderService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); @@ -89,7 +88,7 @@ public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOExcep service.start(mock(Model.class), listener); listener.actionGet(TIMEOUT); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(2)).start(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 44e3f34ffcc15..599df8d1cfb3b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -19,9 +19,9 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.inference.results.TextEmbeddingByteResultsTests; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.inference.results.InferenceTextEmbeddingByteResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; import java.util.EnumSet; @@ -719,7 +719,7 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[6]; - listener.onResponse(new TextEmbeddingResults(List.of())); + listener.onResponse(new InferenceTextEmbeddingFloatResults(List.of())); return Void.TYPE; }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); @@ -742,7 +742,7 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmp doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) 
invocation.getArguments()[6]; - listener.onResponse(new TextEmbeddingByteResults(List.of())); + listener.onResponse(new InferenceTextEmbeddingByteResults(List.of())); return Void.TYPE; }).when(service).infer(any(), any(), any(), any(), any(), any(), any()); @@ -786,7 +786,7 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { var model = mock(Model.class); when(model.getTaskType()).thenReturn(TaskType.TEXT_EMBEDDING); - var textEmbedding = TextEmbeddingByteResultsTests.createRandomResults(); + var textEmbedding = InferenceTextEmbeddingByteResultsTests.createRandomResults(); doAnswer(invocation -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 5869366ac2e22..18d7b6e072fe3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -62,13 +62,13 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; +import static org.elasticsearch.xpack.inference.external.request.azureaistudio.AzureAiStudioRequestFields.API_KEY_HEADER; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureaistudio.AzureAiStudioConstants.API_KEY_FIELD; import static org.hamcrest.CoreMatchers.is; @@ -76,7 +76,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -819,7 +818,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureAiStudioModel() throws IOExc var sender = 
mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -841,7 +840,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureAiStudioModel() throws IOExc is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -857,22 +856,22 @@ public void testChunkedInfer_Embeddings_CallsInfer_ConvertsFloatResponse() throw String responseJson = """ { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - 0.0123, - -0.0123 - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } } """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); @@ -902,20 +901,15 @@ public void testChunkedInfer_Embeddings_CallsInfer_ConvertsFloatResponse() throw ); var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(result, CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.0123f, (double) -0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(0.0123f, -0.0123f)) ) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettingsTests.java index 79d6e384d7693..d46a5f190017a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionServiceSettingsTests.java @@ -112,7 +112,8 @@ public void testToFilteredXContent_WritesAllValues() throws IOException { String xContentResult = Strings.toString(builder); assertThat(xContentResult, CoreMatchers.is(""" - {"target":"target_value","provider":"openai","endpoint_type":"token"}""")); + {"target":"target_value","provider":"openai","endpoint_type":"token",""" + """ + "rate_limit":{"requests_per_minute":3}}""")); } public static HashMap createRequestSettingsMap(String target, String provider, String endpointType) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java index 283bfa1490df2..a592dd6e1f956 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java @@ -295,7 +295,7 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t assertThat(xContentResult, CoreMatchers.is(""" {"target":"target_value","provider":"openai","endpoint_type":"token",""" + """ - "dimensions":1024,"max_input_tokens":512}""")); + "rate_limit":{"requests_per_minute":3},"dimensions":1024,"max_input_tokens":512}""")); } public static HashMap createRequestSettingsMap( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 9fe8b472b22a5..e59664d0e0129 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -55,13 +55,13 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettingsTests.getAzureOpenAiSecretSettingsMap; @@ -73,7 +73,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static 
org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -594,7 +593,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureOpenAiModel() throws IOExcep var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -616,7 +615,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotAzureOpenAiModel() throws IOExcep is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -1074,22 +1073,22 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti String responseJson = """ { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - 0.0123, - -0.0123 - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } } """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); @@ -1108,20 +1107,15 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti ); var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(result, CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.0123f, (double) -0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(0.0123f, -0.0123f)) ) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java index 46e514c8b16c4..797cad8f300ae 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; import java.io.IOException; @@ -46,7 +47,8 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { AzureOpenAiServiceFields.API_VERSION, apiVersion ) - ) + ), + 
ConfigurationParseContext.PERSISTENT ); assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null))); @@ -63,18 +65,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); } - public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { - var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = entity.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, is(""" - {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); - } - @Override protected Writeable.Reader instanceReader() { return AzureOpenAiCompletionServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index f4c6f9b2a4f07..cbb9eea223802 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -389,7 +389,7 @@ public void testToXContent_WritesAllValues() throws IOException { "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":3},"dimensions_set_by_user":false}""")); } - public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_RateLimit() throws IOException { + public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser() throws IOException { var entity = new AzureOpenAiEmbeddingsServiceSettings( "resource", "deployment", @@ -408,7 +408,7 @@ public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_Ra assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ - "dimensions":1024,"max_input_tokens":512}""")); + "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":1}}""")); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index f06fee4b0b9c4..20eb183c17900 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import 
org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -73,7 +73,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -613,7 +612,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -635,7 +634,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -1222,15 +1221,15 @@ public void testChunkedInfer_BatchesCalls() throws IOException { var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); { - assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("foo", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("bar", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModelTests.java index aac04e301ece7..b9fc7ee7b9952 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionModelTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; import java.util.HashMap; @@ -28,7 +29,8 @@ public void 
testCreateModel_AlwaysWithEmptyTaskSettings() { "service", new HashMap<>(Map.of()), new HashMap<>(Map.of("model", "overridden model")), - null + null, + ConfigurationParseContext.PERSISTENT ); assertThat(model.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettingsTests.java index f4cab3c2b0f1e..ed8bc90d32140 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; @@ -34,7 +35,8 @@ public void testFromMap_WithRateLimitSettingsNull() { var model = "model"; var serviceSettings = CohereCompletionServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, model)) + new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, model)), + ConfigurationParseContext.PERSISTENT ); assertThat(serviceSettings, is(new CohereCompletionServiceSettings(url, model, null))); @@ -55,7 +57,8 @@ public void testFromMap_WithRateLimitSettings() { RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, requestsPerMinute)) ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat(serviceSettings, is(new CohereCompletionServiceSettings(url, model, new RateLimitSettings(requestsPerMinute)))); @@ -72,18 +75,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","model_id":"model","rate_limit":{"requests_per_minute":3}}""")); } - public void testToXContent_WithFilteredObject_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new CohereCompletionServiceSettings("url", "model", new RateLimitSettings(3)); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, is(""" - {"url":"url","model_id":"model"}""")); - } - @Override protected Writeable.Reader instanceReader() { return CohereCompletionServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 6f8fe6344b57f..73ebd6c6c0505 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -331,21 +331,6 @@ public void testToXContent_WritesAllValues() throws IOException { "rate_limit":{"requests_per_minute":3},"embedding_type":"byte"}""")); } - public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new CohereEmbeddingsServiceSettings( - new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), - CohereEmbeddingType.INT8 - ); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - assertThat(xContentResult, is(""" - {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ - "embedding_type":"byte"}""")); - } - @Override protected Writeable.Reader instanceReader() { return CohereEmbeddingsServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java index 4943ddf74fda1..1ce5a9fb12833 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java @@ -51,20 +51,6 @@ public void testToXContent_WritesAllValues() throws IOException { "rate_limit":{"requests_per_minute":3}}""")); } - public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new CohereRerankServiceSettings( - new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) - ); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - // TODO we probably shouldn't allow configuring these fields for reranking - assertThat(xContentResult, is(""" - {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id"}""")); - } - @Override protected Writeable.Reader instanceReader() { return CohereRerankServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index ea11e9d0343e3..8f8c73eaab79a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -27,14 +27,19 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import 
org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults;
 import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults;
 import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
+import org.elasticsearch.xpack.core.ml.action.InferModelAction;
 import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction;
 import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
-import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests;
+import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings;
 import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
+import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults;
+import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResultsTests;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate;
+import org.elasticsearch.xpack.core.utils.FloatConversionUtils;
 import org.elasticsearch.xpack.inference.services.ServiceFields;
 import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings;
 import org.junit.After;
@@ -465,24 +470,19 @@ public void testParsePersistedConfig() {
     @SuppressWarnings("unchecked")
     public void testChunkInfer() {
         var mlTrainedModelResults = new ArrayList<InferenceResults>();
-        mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults());
-        mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults());
+        mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults());
+        mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults());
         mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom")));
-        var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults);
+        var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true);
         ThreadPool threadpool = new TestThreadPool("test");
         Client client = mock(Client.class);
         when(client.threadPool()).thenReturn(threadpool);
         doAnswer(invocationOnMock -> {
-            var listener = (ActionListener<InferTrainedModelDeploymentAction.Response>) invocationOnMock.getArguments()[2];
+            var listener = (ActionListener<InferModelAction.Response>) invocationOnMock.getArguments()[2];
             listener.onResponse(response);
             return null;
-        }).when(client)
-            .execute(
-                same(InferTrainedModelDeploymentAction.INSTANCE),
-                any(InferTrainedModelDeploymentAction.Request.class),
-                any(ActionListener.class)
-            );
+        }).when(client).execute(same(InferModelAction.INSTANCE), any(InferModelAction.Request.class), any(ActionListener.class));
         var model = new MultilingualE5SmallModel(
             "foo",
@@ -495,18 +495,43 @@ public void testChunkInfer() {
         var gotResults = new AtomicBoolean();
         var resultsListener = ActionListener.<List<ChunkedInferenceServiceResults>>wrap(chunkedResponse -> {
             assertThat(chunkedResponse, hasSize(3));
-            assertThat(chunkedResponse.get(0), instanceOf(ChunkedTextEmbeddingResults.class));
-            var result1 = (ChunkedTextEmbeddingResults) chunkedResponse.get(0);
+            assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class));
+            var result1 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(0);
+            assertEquals(
+                ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().size(),
+                result1.getChunks().size()
+            );
+
assertEquals( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).matchedText(), + result1.getChunks().get(0).matchedText() + ); + assertArrayEquals( + (FloatConversionUtils.floatArrayOf( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).embedding() + )), + result1.getChunks().get(0).embedding(), + 0.0001f + ); + assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var result2 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(1); + // assertEquals(((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunks()); + assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(0)).getChunks(), - result1.getChunks() + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().size(), + result2.getChunks().size() ); - assertThat(chunkedResponse.get(1), instanceOf(ChunkedTextEmbeddingResults.class)); - var result2 = (ChunkedTextEmbeddingResults) chunkedResponse.get(1); assertEquals( - ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(1)).getChunks(), - result2.getChunks() + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).matchedText(), + result2.getChunks().get(0).matchedText() ); + assertArrayEquals( + (FloatConversionUtils.floatArrayOf( + ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).embedding() + )), + result2.getChunks().get(0).embedding(), + 0.0001f + ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); assertThat(result3.getException(), instanceOf(RuntimeException.class)); assertThat(result3.getException().getMessage(), containsString("boom")); @@ -644,6 +669,31 @@ public void testParsePersistedConfig_Rerank() { } } + public void testBuildInferenceRequest() { + var id = randomAlphaOfLength(5); + var inputs = randomList(1, 3, () -> randomAlphaOfLength(4)); + var inputType = randomFrom(InputType.SEARCH, InputType.INGEST); + var timeout = randomTimeValue(); + var chunk = randomBoolean(); + var request = ElasticsearchInternalService.buildInferenceRequest( + id, + TextEmbeddingConfigUpdate.EMPTY_INSTANCE, + inputs, + inputType, + timeout, + chunk + ); + + assertEquals(id, request.getId()); + assertEquals(inputs, request.getTextInput()); + assertEquals( + inputType == InputType.INGEST ? 
TrainedModelPrefixStrings.PrefixType.INGEST : TrainedModelPrefixStrings.PrefixType.SEARCH,
+            request.getPrefixType()
+        );
+        assertEquals(timeout, request.getInferenceTimeout());
+        assertEquals(chunk, request.isChunked());
+    }
+
     private ElasticsearchInternalService createService(Client client) {
         var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client);
         return new ElasticsearchInternalService(context);
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java
index dcbb523cceed9..27db8143f0c83 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java
@@ -23,11 +23,13 @@ import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.inference.action.InferenceAction;
-import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults;
 import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults;
+import org.elasticsearch.xpack.core.ml.action.InferModelAction;
 import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction;
-import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResultsTests;
 import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
+import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults;
+import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResultsTests;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate;
 import java.util.ArrayList;
@@ -334,24 +336,19 @@ public void testParseRequestConfig_DefaultModel() {
     @SuppressWarnings("unchecked")
     public void testChunkInfer() {
         var mlTrainedModelResults = new ArrayList<InferenceResults>();
-        mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults());
-        mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults());
+        mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults());
+        mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults());
         mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom")));
-        var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults);
+        var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true);
         ThreadPool threadpool = new TestThreadPool("test");
         Client client = mock(Client.class);
         when(client.threadPool()).thenReturn(threadpool);
         doAnswer(invocationOnMock -> {
-            var listener = (ActionListener<InferTrainedModelDeploymentAction.Response>) invocationOnMock.getArguments()[2];
+            var listener = (ActionListener<InferModelAction.Response>) invocationOnMock.getArguments()[2];
             listener.onResponse(response);
             return null;
-        }).when(client)
-            .execute(
-                same(InferTrainedModelDeploymentAction.INSTANCE),
-                any(InferTrainedModelDeploymentAction.Request.class),
-                any(ActionListener.class)
-            );
+        }).when(client).execute(same(InferModelAction.INSTANCE), any(InferModelAction.Request.class), any(ActionListener.class));
         var model = new ElserInternalModel(
             "foo",
@@ -365,18
+362,12 @@ public void testChunkInfer() {
         var gotResults = new AtomicBoolean();
         var resultsListener = ActionListener.<List<ChunkedInferenceServiceResults>>wrap(chunkedResponse -> {
             assertThat(chunkedResponse, hasSize(3));
-            assertThat(chunkedResponse.get(0), instanceOf(ChunkedSparseEmbeddingResults.class));
-            var result1 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(0);
-            assertEquals(
-                ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(0)).getChunks(),
-                result1.getChunkedResults()
-            );
-            assertThat(chunkedResponse.get(1), instanceOf(ChunkedSparseEmbeddingResults.class));
-            var result2 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(1);
-            assertEquals(
-                ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(),
-                result2.getChunkedResults()
-            );
+            assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedSparseEmbeddingResults.class));
+            var result1 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(0);
+            assertEquals(((InferenceChunkedTextExpansionResults) mlTrainedModelResults.get(0)).getChunks(), result1.getChunkedResults());
+            assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedSparseEmbeddingResults.class));
+            var result2 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(1);
+            assertEquals(((InferenceChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunkedResults());
             var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2);
             assertThat(result3.getException(), instanceOf(RuntimeException.class));
             assertThat(result3.getException().getMessage(), containsString("boom"));
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java
index 32e912ff8529a..1cdd7997b96c0 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java
@@ -32,7 +32,7 @@ import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.action.InferenceAction;
 import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults;
-import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults;
 import org.elasticsearch.xpack.inference.external.http.HttpClientManager;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests;
@@ -73,7 +73,6 @@ import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.hasSize;
-import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -494,7 +493,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotGoogleAiStudioModel() throws IOEx
         var sender = mock(Sender.class);
         var factory = mock(HttpRequestSender.Factory.class);
-        when(factory.createSender(anyString())).thenReturn(sender);
+        when(factory.createSender()).thenReturn(sender);
         var
mockModel = getInvalidModel("model_id", "service_name"); @@ -516,7 +515,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotGoogleAiStudioModel() throws IOEx is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -711,8 +710,8 @@ public void testChunkedInfer_Batches() throws IOException { // first result { - assertThat(results.get(0), instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertEquals(input.get(0), floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.0123f, -0.0123f }, floatResult.chunks().get(0).embedding())); @@ -720,8 +719,8 @@ public void testChunkedInfer_Batches() throws IOException { // second result { - assertThat(results.get(1), instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals(input.get(1), floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.0456f, -0.0456f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java index 025317fbe025a..f4c13db78c4bc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionModelTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; import java.net.URISyntaxException; @@ -28,7 +29,8 @@ public void testCreateModel_AlwaysWithEmptyTaskSettings() { "service", new HashMap<>(Map.of("model_id", "model")), new HashMap<>(Map.of()), - null + null, + ConfigurationParseContext.PERSISTENT ); assertThat(model.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettingsTests.java index 46e6e60af493c..6652af26e09e1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/completion/GoogleAiStudioCompletionServiceSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; @@ -31,7 +32,10 @@ public static GoogleAiStudioCompletionServiceSettings createRandom() { public void testFromMap_Request_CreatesSettingsCorrectly() { var model = "some model"; - var serviceSettings = GoogleAiStudioCompletionServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.MODEL_ID, model))); + var serviceSettings = GoogleAiStudioCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.MODEL_ID, model)), + ConfigurationParseContext.PERSISTENT + ); assertThat(serviceSettings, is(new GoogleAiStudioCompletionServiceSettings(model, null))); } @@ -47,18 +51,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"model_id":"model","rate_limit":{"requests_per_minute":360}}""")); } - public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { - var entity = new GoogleAiStudioCompletionServiceSettings("model", null); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = entity.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, is(""" - {"model_id":"model"}""")); - } - @Override protected Writeable.Reader instanceReader() { return GoogleAiStudioCompletionServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettingsTests.java index b5fbd28b476ba..cc195333adfd4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/embeddings/GoogleAiStudioEmbeddingsServiceSettingsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; @@ -55,7 +56,8 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { ServiceFields.SIMILARITY, similarity.toString() ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat(serviceSettings, is(new GoogleAiStudioEmbeddingsServiceSettings(model, maxInputTokens, dims, similarity, null))); @@ -80,23 +82,6 @@ public void testToXContent_WritesAllValues() throws IOException { }""")); } - public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { - var entity = new GoogleAiStudioEmbeddingsServiceSettings("model", 1024, 8, 
SimilarityMeasure.DOT_PRODUCT, null); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = entity.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" - { - "model_id":"model", - "max_input_tokens": 1024, - "dimensions": 8, - "similarity": "dot_product" - }""")); - } - @Override protected Writeable.Reader instanceReader() { return GoogleAiStudioEmbeddingsServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index 398b21312a03a..fd7e1b48b7e03 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.junit.After; import org.junit.Before; @@ -33,7 +34,6 @@ import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.hamcrest.CoreMatchers.is; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -59,7 +59,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -81,7 +81,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -111,7 +111,8 @@ protected HuggingFaceModel createModel( TaskType taskType, Map serviceSettings, Map secretSettings, - String failureMessage + String failureMessage, + ConfigurationParseContext context ) { return null; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 91b91593adee7..04e9697b08877 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -15,6 +15,7 @@ 
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -57,7 +58,10 @@ public void testFromMap() { var dims = 384; var maxInputTokens = 128; { - var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))); + var serviceSettings = HuggingFaceServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.URL, url)), + ConfigurationParseContext.PERSISTENT + ); assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); } { @@ -73,7 +77,8 @@ public void testFromMap() { ServiceFields.MAX_INPUT_TOKENS, maxInputTokens ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat( serviceSettings, @@ -95,7 +100,8 @@ public void testFromMap() { RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 3)) ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat( serviceSettings, @@ -105,7 +111,10 @@ public void testFromMap() { } public void testFromMap_MissingUrl_ThrowsError() { - var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceServiceSettings.fromMap(new HashMap<>())); + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(), ConfigurationParseContext.PERSISTENT) + ); assertThat( thrownException.getMessage(), @@ -118,7 +127,7 @@ public void testFromMap_MissingUrl_ThrowsError() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, "")), ConfigurationParseContext.PERSISTENT) ); assertThat( @@ -136,7 +145,7 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url)), ConfigurationParseContext.PERSISTENT) ); assertThat( @@ -152,7 +161,10 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { var similarity = "by_size"; var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity))) + () -> HuggingFaceServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity)), + ConfigurationParseContext.PERSISTENT + ) ); assertThat( @@ -175,18 +187,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","rate_limit":{"requests_per_minute":3}}""")); } - public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new HuggingFaceServiceSettings(ServiceUtils.createUri("url"), null, null, null, new RateLimitSettings(3)); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - 
filteredXContent.toXContent(builder, null);
-        String xContentResult = org.elasticsearch.common.Strings.toString(builder);
-
-        assertThat(xContentResult, is("""
-            {"url":"url"}"""));
-    }
-
     @Override
     protected Writeable.Reader<HuggingFaceServiceSettings> instanceReader() {
         return HuggingFaceServiceSettings::new;
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java
index 914775bf9fa61..a36306e40f5cb 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java
@@ -31,8 +31,8 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.action.InferenceAction;
-import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults;
-import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults;
+import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults;
 import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults;
 import org.elasticsearch.xpack.inference.external.http.HttpClientManager;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
@@ -56,11 +56,11 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
+import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays;
 import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool;
 import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty;
 import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap;
 import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl;
-import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays;
 import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat;
 import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings;
 import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap;
@@ -591,6 +591,7 @@ public void testCheckModelConfig_LeavesSimilarityAsNull_WhenUnspecified() throws
         }
     }
+    // TODO
     public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() throws IOException {
         var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
         String responseJson = """
             {
-            "embeddings": [
-                [
-                    -0.0123,
-                    0.0123
-                ]
-            ]
+                "embeddings": [
+                    [
+                        -0.0123,
+                        0.0123
+                    ]
+                ]
             }
             """;
         webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
@@ -621,20 +622,15 @@ public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() th
         );
         var result = listener.actionGet(TIMEOUT).get(0);
-        assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class));
+        assertThat(result,
CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); MatcherAssert.assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), + asMapWithListsInsteadOfArrays((InferenceChunkedTextEmbeddingFloatResults) result), Matchers.is( Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, + InferenceChunkedTextEmbeddingFloatResults.FIELD_NAME, List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) -0.0123f, (double) 0.0123f) - ) + Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, List.of(-0.0123f, 0.0123f)) ) ) ) @@ -685,7 +681,7 @@ public void testChunkedInfer_CallsInfer_Elser_ConvertsFloatResponse() throws IOE result.asMap(), Matchers.is( Map.of( - ChunkedSparseEmbeddingResults.FIELD_NAME, + InferenceChunkedSparseEmbeddingResults.FIELD_NAME, List.of( Map.of(ChunkedNlpInferenceResults.TEXT, "abc", ChunkedNlpInferenceResults.INFERENCE, Map.of(".", 0.13315596f)) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java index 57f9c59b65e12..2a44429687fb3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -32,7 +33,10 @@ public static HuggingFaceElserServiceSettings createRandom() { public void testFromMap() { var url = "https://www.abc.com"; - var serviceSettings = HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))); + var serviceSettings = HuggingFaceElserServiceSettings.fromMap( + new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url)), + ConfigurationParseContext.PERSISTENT + ); assertThat(new HuggingFaceElserServiceSettings(url), is(serviceSettings)); } @@ -40,7 +44,10 @@ public void testFromMap() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, ""))) + () -> HuggingFaceElserServiceSettings.fromMap( + new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, "")), + ConfigurationParseContext.PERSISTENT + ) ); assertThat( @@ -55,7 +62,10 @@ public void testFromMap_EmptyUrl_ThrowsError() { } public void testFromMap_MissingUrl_ThrowsError() { - var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>())); + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>(), ConfigurationParseContext.PERSISTENT) + ); assertThat( thrownException.getMessage(), @@ -72,7 +82,10 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = 
"https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))) + () -> HuggingFaceElserServiceSettings.fromMap( + new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url)), + ConfigurationParseContext.PERSISTENT + ) ); assertThat( @@ -98,18 +111,6 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","max_input_tokens":512,"rate_limit":{"requests_per_minute":3}}""")); } - public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = org.elasticsearch.common.Strings.toString(builder); - - assertThat(xContentResult, is(""" - {"url":"url","max_input_tokens":512}""")); - } - @Override protected Writeable.Reader instanceReader() { return HuggingFaceElserServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index 3ead273e78110..508d5a97fe564 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -67,7 +67,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -393,7 +392,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotMistralEmbeddingsModel() throws I var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -415,7 +414,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotMistralEmbeddingsModel() throws I is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -477,14 +476,14 @@ public void testChunkedInfer_Embeddings_CallsInfer_ConvertsFloatResponse() throw assertThat(results, hasSize(2)); { - 
assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettingsTests.java index 13f43a5f31ad3..076986acdcee6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsServiceSettingsTests.java @@ -98,18 +98,6 @@ public void testToXContent_WritesAllValues() throws IOException { "rate_limit":{"requests_per_minute":3}}""")); } - public void testToFilteredXContent_WritesFilteredValues() throws IOException { - var entity = new MistralEmbeddingsServiceSettings("model_name", 1024, 512, null, new RateLimitSettings(3)); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = entity.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = Strings.toString(builder); - - assertThat(xContentResult, CoreMatchers.is(""" - {"model":"model_name","dimensions":1024,"max_input_tokens":512}""")); - } - public void testStreamInputAndOutput_WritesValuesCorrectly() throws IOException { var outputBuffer = new BytesStreamOutput(); var settings = new MistralEmbeddingsServiceSettings("model_name", 1024, 512, null, new RateLimitSettings(3)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index cbac29c452772..e0e1ee3e81aef 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import 
org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -72,7 +72,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -675,7 +674,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); - when(factory.createSender(anyString())).thenReturn(sender); + when(factory.createSender()).thenReturn(sender); var mockModel = getInvalidModel("model_id", "service_name"); @@ -697,7 +696,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") ); - verify(factory, times(1)).createSender(anyString()); + verify(factory, times(1)).createSender(); verify(sender, times(1)).start(); } @@ -1263,15 +1262,15 @@ public void testChunkedInfer_Batches() throws IOException { var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); { - assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("foo", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.123f, -0.123f }, floatResult.chunks().get(0).embedding())); } { - assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); - var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); assertEquals("bar", floatResult.chunks().get(0).matchedText()); assertTrue(Arrays.equals(new float[] { 0.223f, -0.223f }, floatResult.chunks().get(0).embedding())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index 186ca89426418..051a9bc6d9bef 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; import 
org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; @@ -48,7 +49,8 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { ServiceFields.MAX_INPUT_TOKENS, maxInputTokens ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat( @@ -77,7 +79,8 @@ public void testFromMap_Request_CreatesSettingsCorrectly_WithRateLimit() { RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, rateLimit)) ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertThat( @@ -101,7 +104,8 @@ public void testFromMap_MissingUrl_DoesNotThrowException() { ServiceFields.MAX_INPUT_TOKENS, maxInputTokens ) - ) + ), + ConfigurationParseContext.PERSISTENT ); assertNull(serviceSettings.uri()); @@ -113,7 +117,10 @@ public void testFromMap_MissingUrl_DoesNotThrowException() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> OpenAiChatCompletionServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, "", ServiceFields.MODEL_ID, "model"))) + () -> OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.URL, "", ServiceFields.MODEL_ID, "model")), + ConfigurationParseContext.PERSISTENT + ) ); assertThat( @@ -132,7 +139,8 @@ public void testFromMap_MissingOrganization_DoesNotThrowException() { var maxInputTokens = 8192; var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens)) + new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens)), + ConfigurationParseContext.PERSISTENT ); assertNull(serviceSettings.uri()); @@ -144,7 +152,8 @@ public void testFromMap_EmptyOrganization_ThrowsError() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiChatCompletionServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")) + new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")), + ConfigurationParseContext.PERSISTENT ) ); @@ -164,7 +173,8 @@ public void testFromMap_InvalidUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiChatCompletionServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, "model")) + new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, "model")), + ConfigurationParseContext.PERSISTENT ) ); @@ -213,19 +223,6 @@ public void testToXContent_DoesNotWriteOptionalValues() throws IOException { {"model_id":"model","rate_limit":{"requests_per_minute":500}}""")); } - public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { - var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024, new RateLimitSettings(2)); - - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - var filteredXContent = serviceSettings.getFilteredXContentObject(); - filteredXContent.toXContent(builder, null); - String xContentResult = org.elasticsearch.common.Strings.toString(builder); - - assertThat(xContentResult, is(""" - {"model_id":"model","url":"url","organization_id":"org",""" + """ - "max_input_tokens":1024}""")); - } - @Override protected Writeable.Reader instanceReader() { return OpenAiChatCompletionServiceSettings::new; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 438f895fe48ad..cc0004a2d678c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -406,7 +406,7 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2}""")); + "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000}}""")); } public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() throws IOException { @@ -428,7 +428,7 @@ public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() thro assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2}""")); + "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":2000}}""")); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java index cdee7c452ff52..7e3bdd6b8e5dc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.settings; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; @@ -14,6 +15,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import java.io.IOException; import java.util.HashMap; @@ -49,7 +51,7 @@ public void testOf() { Map settings = new HashMap<>( Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); - var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation); + var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT); assertThat(res, is(new RateLimitSettings(100))); assertTrue(validation.validationErrors().isEmpty()); @@ -60,7 +62,7 @@ public void testOf_UsesDefaultValue_WhenRateLimit_IsAbsent() { Map settings = new HashMap<>( Map.of("abc", new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); - var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation); + var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT); assertThat(res, is(new RateLimitSettings(1))); 
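// ---------------------------------------------------------------------------
// [Editor's note] The RateLimitSettingsTests hunks around this point exercise a
// new RateLimitSettings.of(map, default, validation, serviceName, context)
// signature that is strict only for request-time configs. A minimal sketch of
// that behavior follows for orientation; it is NOT the shipped implementation,
// and the helper names removeFromMapOrDefaultEmpty/removeAsType as well as the
// RestStatus choice are assumptions reconstructed from the tests.
//
// public static RateLimitSettings of(
//     Map<String, Object> map,
//     RateLimitSettings defaultValue,
//     ValidationException validationException,
//     String serviceName,
//     ConfigurationParseContext context
// ) {
//     Map<String, Object> rateLimit = removeFromMapOrDefaultEmpty(map, FIELD_NAME);
//     Long requestsPerMinute = removeAsType(rateLimit, REQUESTS_PER_MINUTE_FIELD, Long.class);
//     if (context == ConfigurationParseContext.REQUEST && rateLimit.isEmpty() == false) {
//         // Leftover keys (e.g. {abc=100}) are unknown to the service: reject them
//         // for requests, but keep reading persisted configs leniently so stored
//         // endpoints survive upgrades.
//         throw new ElasticsearchStatusException(
//             "Model configuration contains settings [" + rateLimit + "] unknown to the [" + serviceName + "] service",
//             RestStatus.BAD_REQUEST
//         );
//     }
//     return requestsPerMinute == null ? defaultValue : new RateLimitSettings(requestsPerMinute);
// }
// ---------------------------------------------------------------------------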
assertTrue(validation.validationErrors().isEmpty());
@@ -69,12 +71,24 @@ public void testOf_UsesDefaultValue_WhenRateLimit_IsAbsent() {
     public void testOf_UsesDefaultValue_WhenRequestsPerMinute_IsAbsent() {
         var validation = new ValidationException();
         Map<String, Object> settings = new HashMap<>(Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of("abc", 100))));
-        var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation);
+        var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT);
         assertThat(res, is(new RateLimitSettings(1)));
         assertTrue(validation.validationErrors().isEmpty());
     }
+    public void testOf_ThrowsException_WithUnknownField_InRequestContext() {
+        var validation = new ValidationException();
+        Map<String, Object> settings = new HashMap<>(Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of("abc", 100))));
+
+        var exception = expectThrows(
+            ElasticsearchStatusException.class,
+            () -> RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.REQUEST)
+        );
+
+        assertThat(exception.getMessage(), is("Model configuration contains settings [{abc=100}] unknown to the [test] service"));
+    }
+
     public void testToXContent() throws IOException {
         var settings = new RateLimitSettings(100);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java
index d54cac9dca496..004d87d643962 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java
@@ -290,6 +290,7 @@ private void inferAgainstAllocatedModel(
             deploymentRequest.setPrefixType(request.getPrefixType());
             deploymentRequest.setNodes(node.v1());
             deploymentRequest.setParentTask(parentTaskId);
+            deploymentRequest.setChunkResults(request.isChunked());
             startPos += node.v2();
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
index 22d9294783e7c..6b6ab43e10c58 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
@@ -8,8 +8,8 @@ package org.elasticsearch.xpack.ml.inference.nlp;
 import org.elasticsearch.inference.InferenceResults;
-import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults;
-import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults;
+import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults;
+import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig;
 import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer;
 import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult;
@@ -60,22 +60,24 @@ static InferenceResults processResult(
         boolean chunkResults
     ) {
         if (chunkResults) {
-            var embeddings = new ArrayList<ChunkedTextEmbeddingResults.EmbeddingChunk>();
+            var embeddings = new ArrayList<MlChunkedTextEmbeddingFloatResults.EmbeddingChunk>();
             for (int i = 0; i < pyTorchResult.getInferenceResult()[0].length; i++) {
                 int startOffset =
tokenization.getTokenization(i).tokens().get(0).get(0).startOffset();
                 int lastIndex = tokenization.getTokenization(i).tokens().get(0).size() - 1;
                 int endOffset = tokenization.getTokenization(i).tokens().get(0).get(lastIndex).endOffset();
                 String matchedText = tokenization.getTokenization(i).input().get(0).substring(startOffset, endOffset);
-                embeddings.add(new ChunkedTextEmbeddingResults.EmbeddingChunk(matchedText, pyTorchResult.getInferenceResult()[0][i]));
+                embeddings.add(
+                    new MlChunkedTextEmbeddingFloatResults.EmbeddingChunk(matchedText, pyTorchResult.getInferenceResult()[0][i])
+                );
             }
-            return new ChunkedTextEmbeddingResults(
+            return new MlChunkedTextEmbeddingFloatResults(
                 Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD),
                 embeddings,
                 tokenization.anyTruncated()
             );
         } else {
-            return new TextEmbeddingResults(
+            return new MlTextEmbeddingResults(
                 Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD),
                 pyTorchResult.getInferenceResult()[0][0],
                 tokenization.anyTruncated()
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java
index 1b44614bf4a2b..603abe6394b93 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessor.java
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.inference.nlp;
 import org.elasticsearch.inference.InferenceResults;
-import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults;
+import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults;
 import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig;
 import org.elasticsearch.xpack.core.ml.search.WeightedToken;
@@ -72,7 +72,7 @@ static InferenceResults processResult(
         boolean chunkResults
     ) {
         if (chunkResults) {
-            var chunkedResults = new ArrayList<ChunkedTextExpansionResults.ChunkedResult>();
+            var chunkedResults = new ArrayList<InferenceChunkedTextExpansionResults.ChunkedResult>();
             for (int i = 0; i < pyTorchResult.getInferenceResult()[0].length; i++) {
                 int startOffset = tokenization.getTokenization(i).tokens().get(0).get(0).startOffset();
@@ -82,10 +82,10 @@ static InferenceResults processResult(
                 var weightedTokens = sparseVectorToTokenWeights(pyTorchResult.getInferenceResult()[0][i], tokenization, replacementVocab);
                 weightedTokens.sort((t1, t2) -> Float.compare(t2.weight(), t1.weight()));
-                chunkedResults.add(new ChunkedTextExpansionResults.ChunkedResult(matchedText, weightedTokens));
+                chunkedResults.add(new InferenceChunkedTextExpansionResults.ChunkedResult(matchedText, weightedTokens));
             }
-            return new ChunkedTextExpansionResults(
+            return new InferenceChunkedTextExpansionResults(
                 Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD),
                 chunkedResults,
                 tokenization.anyTruncated()
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
index 82d19f9d72273..83572b02f754d 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
@@ -325,7 +325,7 @@ private static boolean
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index 82d19f9d72273..83572b02f754d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -325,7 +325,7 @@ private static boolean isIrrecoverable(Exception ex) { } @SuppressWarnings("NonAtomicOperationOnVolatileField") - private static class BulkRequestRewriter { + static class BulkRequestRewriter { private volatile BulkRequest bulkRequest; BulkRequestRewriter(BulkRequest initialRequest) { @@ -533,7 +533,7 @@ public void cancel(Exception e) { } } - private static BulkRequest buildNewRequestFromFailures(BulkRequest bulkRequest, BulkResponse bulkResponse) { + static BulkRequest buildNewRequestFromFailures(BulkRequest bulkRequest, BulkResponse bulkResponse) { // If we failed, lets set the bulkRequest to be a collection of the failed requests BulkRequest bulkRequestOfFailures = new BulkRequest(); Set<String> failedDocIds = Arrays.stream(bulkResponse.getItems()) @@ -542,6 +542,9 @@ private static BulkRequest buildNewRequestFromFailures(BulkRequest bulkRequest, .collect(Collectors.toSet()); bulkRequest.requests().forEach(docWriteRequest -> { if (failedDocIds.contains(docWriteRequest.id())) { + if (docWriteRequest instanceof IndexRequest ir) { + ir.reset(); + } bulkRequestOfFailures.add(docWriteRequest); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java index bd0916065ec5f..6c7d9ef1b8a1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; @@ -110,14 +110,14 @@ public void buildVector(Client client, ActionListener<float[]> listener) { return; } - if (response.getInferenceResults().get(0) instanceof TextEmbeddingResults textEmbeddingResults) { + if (response.getInferenceResults().get(0) instanceof MlTextEmbeddingResults textEmbeddingResults) { listener.onResponse(textEmbeddingResults.getInferenceAsFloat()); } else if (response.getInferenceResults().get(0) instanceof WarningInferenceResults warning) { listener.onFailure(new IllegalStateException(warning.getWarning())); } else { throw new IllegalStateException( "expected a result of type [" - + TextEmbeddingResults.NAME + + MlTextEmbeddingResults.NAME + "] received [" + response.getInferenceResults().get(0).getWriteableName() + "]. 
Is [" diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java index ba93feee5c42c..bba2844784117 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessorTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult; @@ -35,9 +35,9 @@ public void testSingleResult() { var tokenization = tokenizer.tokenize(input, Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TextExpansionProcessorTests.TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextEmbeddingProcessor.processResult(tokenizationResult, pytorchResult, "foo", false); - assertThat(inferenceResult, instanceOf(TextEmbeddingResults.class)); + assertThat(inferenceResult, instanceOf(MlTextEmbeddingResults.class)); - var result = (TextEmbeddingResults) inferenceResult; + var result = (MlTextEmbeddingResults) inferenceResult; assertThat(result.getInference().length, greaterThan(0)); } } @@ -57,9 +57,9 @@ public void testChunking() { var tokenization = tokenizer.tokenize(input, Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TextExpansionProcessorTests.TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextEmbeddingProcessor.processResult(tokenizationResult, pytorchResult, "foo", true); - assertThat(inferenceResult, instanceOf(ChunkedTextEmbeddingResults.class)); + assertThat(inferenceResult, instanceOf(MlChunkedTextEmbeddingFloatResults.class)); - var chunkedResult = (ChunkedTextEmbeddingResults) inferenceResult; + var chunkedResult = (MlChunkedTextEmbeddingFloatResults) inferenceResult; assertThat(chunkedResult.getChunks(), hasSize(2)); assertEquals("Elasticsearch darts champion little red", chunkedResult.getChunks().get(0).matchedText()); assertEquals("is fun car", chunkedResult.getChunks().get(1).matchedText()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java index 9f2d8bcbfe4ab..add071b0a0de0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextExpansionProcessorTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.inference.nlp; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResults; import 
org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfig; @@ -137,9 +137,9 @@ public void testChunking() { var tokenization = tokenizer.tokenize(input, Tokenization.Truncate.NONE, 0, 0, null); var tokenizationResult = new BertTokenizationResult(TEST_CASED_VOCAB, tokenization, 0); var inferenceResult = TextExpansionProcessor.processResult(tokenizationResult, pytorchResult, Map.of(), "foo", true); - assertThat(inferenceResult, instanceOf(ChunkedTextExpansionResults.class)); + assertThat(inferenceResult, instanceOf(InferenceChunkedTextExpansionResults.class)); - var chunkedResult = (ChunkedTextExpansionResults) inferenceResult; + var chunkedResult = (InferenceChunkedTextExpansionResults) inferenceResult; assertThat(chunkedResult.getChunks(), hasSize(2)); assertEquals("Elasticsearch darts champion little red", chunkedResult.getChunks().get(0).matchedText()); assertEquals("is fun car", chunkedResult.getChunks().get(1).matchedText()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index 2acf2e3da3cf6..e109f2995d215 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; @@ -375,6 +376,34 @@ public void testBulkRequestRetriesMsgHandlerIsCalled() { assertThat(lastMessage.get(), containsString("failed to index after [1] attempts. 
Will attempt again")); } + public void testBuildNewRequestFromFailures_resetsId() { + var bulkRequest = new BulkRequest(); + var indexRequestAutoGeneratedId = new IndexRequest("index-foo"); + indexRequestAutoGeneratedId.autoGenerateId(); + var autoGenId = indexRequestAutoGeneratedId.id(); + var plainIndexRequest = new IndexRequest("index-foo2").id("id-set"); + + bulkRequest.add(indexRequestAutoGeneratedId); + bulkRequest.add(plainIndexRequest); + + var bulkResponse = mock(BulkResponse.class); + + var failed = mock(BulkItemResponse.class); + when(failed.isFailed()).thenReturn(Boolean.TRUE); + when(failed.getId()).thenReturn(autoGenId); + + var sucessful = mock(BulkItemResponse.class); + when(sucessful.isFailed()).thenReturn(Boolean.FALSE); + + when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { failed, sucessful }); + + var modifiedRequestForRetry = ResultsPersisterService.buildNewRequestFromFailures(bulkRequest, bulkResponse); + assertThat(modifiedRequestForRetry.requests(), hasSize(1)); // only the failed item is in the new request + assertThat(modifiedRequestForRetry.requests().get(0), instanceOf(IndexRequest.class)); + var ir = (IndexRequest) modifiedRequestForRetry.requests().get(0); + assertEquals(ir.getAutoGeneratedTimestamp(), -1L); // failed request was reset + } + private static Stubber doAnswerWithResponses(Response response1, Response response2) { return doAnswer(withResponse(response1)).doAnswer(withResponse(response2)); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java index 8575c7e1f4bf3..7721c4c23953b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.ml.MachineLearningTests; import java.io.IOException; @@ -51,7 +51,7 @@ public ActionResponse createResponse(float[] array, TextEmbeddingQueryVectorBuil embedding[i] = array[i]; } return new InferModelAction.Response( - List.of(new TextEmbeddingResults("foo", embedding, randomBoolean())), + List.of(new MlTextEmbeddingResults("foo", embedding, randomBoolean())), builder.getModelId(), true ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java index 2ccdd66089c79..b0fad3da6b036 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java @@ -171,7 +171,7 @@ protected final ExecutionInfo executeWithInfo(TreeType plan) { if (tf.hasChanged()) { hasChanged = true; if (log.isTraceEnabled()) { - log.trace("Rule {} applied\n{}", rule, NodeUtils.diffString(tf.before, tf.after)); + log.trace("Rule {} applied with changes\n{}", rule, NodeUtils.diffString(tf.before, tf.after)); } } else { if 
(log.isTraceEnabled()) { diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index af465658a0b52..b501967524a6b 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -29,7 +29,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; -@ESIntegTestCase.ClusterScope(maxNumDataNodes = 3) +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2, maxNumDataNodes = 4) @ESIntegTestCase.SuiteScopeTestCase public class RRFRankMultiShardIT extends ESIntegTestCase { @@ -973,4 +973,234 @@ public void testMultiBM25AndMultipleKnnWithAggregation() { } ); } + + public void testBasicRRFExplain() { + // our query here is a top-level knn query for vector [9] and a term query for "text0: 10" + // the first result should be the one present in both queries (i.e. doc with text0: 10 and vector: [10]) and the other ones + // should only match the knn query + float[] queryVector = { 9f }; + KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null).queryName("my_knn_search"); + assertResponse( + prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(100, 1)) + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.termQuery("text0", "10")) + .setExplain(true) + .setSize(3), + response -> { + // we cast to Number when looking at values in vector fields because different xContentTypes may return Float or Double + assertEquals(3, response.getHits().getHits().length); + + // first result is the one which matches the term (10) so we should expect an explanation for both queries + SearchHit hit = response.getHits().getAt(0); + assertEquals(1, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertTrue(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(1, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertTrue(hit.getExplanation().getDetails()[0].getDescription().contains("query at index [0]")); + assertTrue(hit.getExplanation().getDetails()[0].getDetails().length > 0); + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + + // second result matched only on the knn query so no match should be expected for the term query + hit = response.getHits().getAt(1); + assertEquals(2, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + 
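// the knn query still matched this hit, so details[1] supplies its only rrf contribution +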
assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + + // third result matched only on the knn query so no match should be expected for the term query + hit = response.getHits().getAt(2); + assertEquals(3, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + } + ); + } + + public void testRRFExplainUnknownField() { + // in this test we try knn with a query on an unknown field that would be rewritten to MatchNoneQuery + // so we expect results and explanations only for the first part + float[] queryVector = { 9f }; + KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null).queryName("my_knn_search"); + assertResponse( + prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(100, 1)) + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.termQuery("unknown_field", "10")) + .setExplain(true) + .setSize(3), + response -> { + // we cast to Number when looking at values in vector fields because different xContentTypes may return Float or Double + assertEquals(3, response.getHits().getHits().length); + + SearchHit hit = response.getHits().getAt(0); + assertEquals(1, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + + hit = response.getHits().getAt(1); + assertEquals(2, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + 
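// with the term query rewritten to MatchNone, the knn search alone determines the rrf score +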
assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + + hit = response.getHits().getAt(2); + assertEquals(3, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(2, hit.getExplanation().getDetails().length); + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + } + ); + } + + public void testRRFExplainOneUnknownFieldSubSearches() { + // this test is similar to the one above, except that we pass a list of sub_searches of which one fails, + // while the other one produces a match. + // So, we'd have a total of 3 queries: a (rewritten) MatchNoneQuery, a TermQuery, and a kNN query + float[] queryVector = { 9f }; + KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null).queryName("my_knn_search"); + assertResponse( + prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(100, 1)) + .setKnnSearch(List.of(knnSearch)) + .setSubSearches( + List.of( + new SubSearchSourceBuilder(QueryBuilders.termQuery("unknown_field", "10")), + new SubSearchSourceBuilder(QueryBuilders.termQuery("text0", "10")) + ) + ) + .setExplain(true) + .setSize(3), + response -> { + // we cast to Number when looking at values in vector fields because different xContentTypes may return Float or Double + assertEquals(3, response.getHits().getHits().length); + + // first result is the one which matches the term (10) and is 3rd closest to our query vector (9) + SearchHit hit = response.getHits().getAt(0); + assertEquals(1, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(3, hit.getExplanation().getDetails().length); + // MatchNone query + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[0].getDetails().length); + // Term query + assertTrue(hit.getExplanation().getDetails()[1].isMatch()); + assertTrue(hit.getExplanation().getDetails()[1].getDescription().contains("query at index [1]")); + assertTrue(hit.getExplanation().getDetails()[1].getDetails().length > 0); + // knn query + assertTrue(hit.getExplanation().getDetails()[2].isMatch()); + assertTrue(hit.getExplanation().getDetails()[2].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[2].getDetails().length > 0); + + // rest of hits match only on the knn query so no match should be expected for the term query either + hit = response.getHits().getAt(1); + assertEquals(2, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); +
assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(3, hit.getExplanation().getDetails().length); + // MatchNone query + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + // term query - should not match + assertFalse(hit.getExplanation().getDetails()[1].isMatch()); + assertEquals( + "rrf score: [0], result not found in query at index [1]", + hit.getExplanation().getDetails()[1].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[1].getDetails().length); + // knn query + assertTrue(hit.getExplanation().getDetails()[2].isMatch()); + assertTrue(hit.getExplanation().getDetails()[2].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[2].getDetails().length > 0); + + // rest of hits match only on the knn query so no match should be expected for the term query either + hit = response.getHits().getAt(2); + assertEquals(3, hit.getRank()); + assertTrue(hit.getExplanation().isMatch()); + assertTrue(hit.getExplanation().getDescription().contains("initial ranks")); + assertEquals(3, hit.getExplanation().getDetails().length); + // MatchNone query + assertFalse(hit.getExplanation().getDetails()[0].isMatch()); + assertEquals(0, hit.getExplanation().getDetails()[0].getValue().intValue()); + assertEquals( + "rrf score: [0], result not found in query at index [0]", + hit.getExplanation().getDetails()[0].getDescription() + ); + // term query - should not match + assertFalse(hit.getExplanation().getDetails()[1].isMatch()); + assertEquals( + "rrf score: [0], result not found in query at index [1]", + hit.getExplanation().getDetails()[1].getDescription() + ); + assertEquals(0, hit.getExplanation().getDetails()[1].getDetails().length); + // knn query + assertTrue(hit.getExplanation().getDetails()[2].isMatch()); + assertTrue(hit.getExplanation().getDetails()[2].getDescription().contains("[my_knn_search]")); + assertTrue(hit.getExplanation().getDetails()[2].getDetails().length > 0); + } + ); + } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java index 8f3ed15037c08..dfd47f122cb61 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.rank.rrf; +import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -14,8 +15,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import 
org.elasticsearch.xcontent.XContentBuilder; @@ -23,6 +27,7 @@ import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -38,7 +43,7 @@ public class RRFRankBuilder extends RankBuilder { public static final ParseField RANK_CONSTANT_FIELD = new ParseField("rank_constant"); static final ConstructingObjectParser<RRFRankBuilder, Void> PARSER = new ConstructingObjectParser<>(RRFRankPlugin.NAME, args -> { - int windowSize = args[0] == null ? DEFAULT_WINDOW_SIZE : (int) args[0]; + int windowSize = args[0] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[0]; int rankConstant = args[1] == null ? DEFAULT_RANK_CONSTANT : (int) args[1]; if (rankConstant < 1) { throw new IllegalArgumentException("[rank_constant] must be greater than [0] for [rrf]"); @@ -94,6 +99,67 @@ public int rankConstant() { return rankConstant; } + @Override + public boolean isCompoundBuilder() { + return true; + } + + @Override + public Explanation explainHit(Explanation baseExplanation, RankDoc rankDoc, List<String> queryNames) { + if (rankDoc == null) { + return baseExplanation; + } + if (false == baseExplanation.isMatch()) { + return baseExplanation; + } + final Explanation hitExplanation = baseExplanation.getDetails().length == 0 + ? Explanation.match(baseExplanation.getValue(), baseExplanation.getDescription(), baseExplanation) + : baseExplanation; + + assert rankDoc instanceof RRFRankDoc : "ScoreDoc is not an instance of RRFRankDoc"; + RRFRankDoc rrfRankDoc = (RRFRankDoc) rankDoc; + int queries = rrfRankDoc.positions.length; + assert queryNames.size() == queries; + Explanation[] details = new Explanation[queries]; + int queryExplainIndex = 0; + for (int i = 0; i < queries; i++) { + final String queryName = queryNames.get(i) != null ?
"[" + queryNames.get(i) + "]" : "at index [" + i + "]"; + if (rrfRankDoc.positions[i] == RRFRankDoc.NO_RANK) { + final String description = "rrf score: [0], result not found in query " + queryName; + details[i] = Explanation.noMatch(description); + } else { + final int rank = rrfRankDoc.positions[i] + 1; + details[i] = Explanation.match( + rank, + "rrf score: [" + + (1f / (rank + rankConstant)) + + "], " + + "for rank [" + + (rank) + + "] in query " + + queryName + + " computed as [1 / (" + + (rank) + + " + " + + rankConstant + + "]), for matching query with score: ", + hitExplanation.getDetails()[queryExplainIndex++] + ); + } + } + return Explanation.match( + rrfRankDoc.score, + "rrf score: [" + + rrfRankDoc.score + + "] computed for initial ranks " + + Arrays.toString(Arrays.stream(rrfRankDoc.positions).map(x -> x + 1).toArray()) + + " with rankConstant: [" + + rankConstant + + "] as sum of [1 / (rank + rankConstant)] for each query", + details + ); + } + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { return new RRFQueryPhaseRankShardContext(queries, rankWindowSize(), rankConstant); } @@ -103,6 +169,16 @@ public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int si return new RRFQueryPhaseRankCoordinatorContext(size, from, rankWindowSize(), rankConstant); } + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return null; + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from) { + return null; + } + @Override protected boolean doEquals(RankBuilder other) { return Objects.equals(rankConstant, ((RRFRankBuilder) other).rankConstant); diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 37bac43f827cf..8f078c0c4d116 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -20,6 +20,8 @@ */ public class RRFRankDoc extends RankDoc { + static final String NAME = "rrf_rank_doc"; + /** * The position within each result set per query. 
diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 37bac43f827cf..8f078c0c4d116 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -20,6 +20,8 @@ */ public class RRFRankDoc extends RankDoc { + static final String NAME = "rrf_rank_doc"; + /** * The position within each result set per query. The length * of {@code positions} is the number of queries that are part @@ -89,4 +91,9 @@ public String toString() { + shardIndex + '}'; } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java index 4d7c60f00ec1c..ece08d1a3d558 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.RankShardResult; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -33,7 +34,8 @@ public class RRFRankPlugin extends Plugin implements SearchPlugin { public List<NamedWriteableRegistry.Entry> getNamedWriteables() { return List.of( new NamedWriteableRegistry.Entry(RankBuilder.class, NAME, RRFRankBuilder::new), - new NamedWriteableRegistry.Entry(RankShardResult.class, NAME, RRFRankShardResult::new) + new NamedWriteableRegistry.Entry(RankShardResult.class, NAME, RRFRankShardResult::new), + new NamedWriteableRegistry.Entry(RankDoc.class, RRFRankDoc.NAME, RRFRankDoc::new) ); } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 077c933fa9add..e5a7983107278 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -71,7 +71,7 @@ public static RRFRetrieverBuilder fromXContent(XContentParser parser, RetrieverP } List<RetrieverBuilder> retrieverBuilders = Collections.emptyList(); - int rankWindowSize = RRFRankBuilder.DEFAULT_WINDOW_SIZE; + int rankWindowSize = RRFRankBuilder.DEFAULT_RANK_WINDOW_SIZE; int rankConstant = RRFRankBuilder.DEFAULT_RANK_CONSTANT; @Override diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/500_rrf_retriever_explain.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/500_rrf_retriever_explain.yml new file mode 100644 index 0000000000000..8d74ecbccd328 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/500_rrf_retriever_explain.yml @@ -0,0 +1,275 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: 'explain for rrf was added in 8.15' + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + text: + type: text + integer: + type: integer + vector: + type: dense_vector + dims: 1 + index: true + similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 + + - do: + index: + index: test + id: "1" + body: + text: "term" + integer: 1 + vector: [5] + + - do: + index: + index: test + id: "2" + body: + text: "term term" + integer: 2 + vector: [4] + + - do: + index: + index: test + id: "3" + body: + text: "term term term" + integer: 3 + vector: [3] + - do: + index: + index: test + id: "4" + body: + text: "term term term term" + integer: 3 + + - do: + index: +
index: test + id: "5" + body: + integer: 1 + vector: [0] + + - do: + indices.refresh: {} + +--- +"using rrf retriever": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: "term" + } + } + } + }, + { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5 + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/rrf.score:.\\[0.25\\].*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/rrf.score:.\\[0\\],.result.not.found./"} + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using named retrievers": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: { + value: "term", + _name: "my_query" + } + } + } + } + }, + { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5, + _name: "my_top_knn" + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: 
{hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/.*my_top_knn.*/" } + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using a mix of named and unnamed retrievers": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: { + value: "term" + } + } + } + } + }, + { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5, + _name: "my_top_knn" + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: 
"/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/.*my_top_knn.*/" } + - length: {hits.hits.2._explanation.details.1.details: 0} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml new file mode 100644 index 0000000000000..5718cd3455526 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/550_rrf_sub_searches_explain.yml @@ -0,0 +1,386 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: 'explain for rrf was added in 8.15' + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + text: + type: text + integer: + type: integer + vector: + type: dense_vector + dims: 1 + index: true + similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 + + - do: + index: + index: test + id: "1" + body: + text: "term" + integer: 1 + vector: [5] + + - do: + index: + index: test + id: "2" + body: + text: "term term" + integer: 2 + vector: [4] + + - do: + index: + index: test + id: "3" + body: + text: "term term term" + integer: 3 + vector: [3] + - do: + index: + index: test + id: "4" + body: + text: "term term term term" + integer: 3 + + - do: + index: + index: test + id: "5" + body: + integer: 1 + vector: [0] + + - do: + indices.refresh: {} + +--- +"using a top level knn and query": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + knn: + field: vector + query_vector: [3] + k: 5 + num_candidates: 5 + query: + term: + text: term + rank: + rrf: + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/rrf.score:.\\[0.25\\].*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: 
{hits.hits.2._explanation.details.0.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/rrf.score:.\\[0\\],.result.not.found./"} + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using sub_searches": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + sub_searches: [ + { + "query": { + "term": { + "text": "term" + } + } + }, + { + "query": { + "knn": + { + "field": "vector", + "query_vector": [ 3 ], + "num_candidates": 5 + } + } + } + ] + rank: + rrf: + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/rrf.score:.\\[0.25\\].*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/rrf.score:.\\[0\\],.result.not.found./"} + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using named top level knn and query": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + knn: + field: vector + query_vector: [3] + k: 5 + num_candidates: 5 + _name: my_top_knn + query: + term: + text: + value: term + _name: my_query + rank: + rrf: + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: 
"/.*my_query.*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/.*my_top_knn.*/" } + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using named sub_searches": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + sub_searches: [ + { + "query": { + "term": { + "text": { + "value": "term", + "_name": "my_query" + } + } + } + }, + { + "query": { + "knn": + { + "field": "vector", + "query_vector": [ 3 ], + "num_candidates": 5, + "_name": "my_top_knn" + } + } + } + ] + rank: + rrf: + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} 
+ - match: {hits.hits.2._explanation.details.0.description: "/.*my_query.*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/.*my_top_knn.*/" } + - length: {hits.hits.2._explanation.details.1.details: 0} + +--- +"using a mix of named and unnamed queries": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + sub_searches: [ + { + "query": { + "term": { + "text": { + "value": "term" + } + } + } + }, + { + "query": { + "knn": + { + "field": "vector", + "query_vector": [ 3 ], + "num_candidates": 5, + "_name": "my_top_knn" + } + } + } + ] + rank: + rrf: + rank_window_size: 5 + rank_constant: 1 + size: 3 + explain: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.8333334, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.8333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.0._explanation.details.1.value: 1} + - match: {hits.hits.0._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/within.top.*/" } + + - close_to: { hits.hits.1._explanation.value: { value: 0.5833334, error: 0.000001 } } + - match: {hits.hits.1._explanation.description: "/rrf.score:.\\[0.5833334\\].*/" } + - match: {hits.hits.1._explanation.details.0.value: 3} + - match: {hits.hits.1._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.1._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.1._explanation.details.1.value: 2} + - match: {hits.hits.1._explanation.details.1.description: "/.*my_top_knn.*/" } + - match: {hits.hits.1._explanation.details.1.details.0.description: "/within.top.*/" } + + - match: {hits.hits.2._explanation.value: 0.5} + - match: {hits.hits.2._explanation.description: "/rrf.score:.\\[0.5\\].*/" } + - match: {hits.hits.2._explanation.details.0.value: 1} + - match: {hits.hits.2._explanation.details.0.description: "/.*at.index.\\[0\\].*/" } + - match: {hits.hits.2._explanation.details.0.details.0.description: "/weight\\(text:term.*/" } + - match: {hits.hits.2._explanation.details.1.value: 0} + - match: {hits.hits.2._explanation.details.1.description: "/.*my_top_knn.*/" } + - length: {hits.hits.2._explanation.details.1.details: 0} diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml new file mode 100644 index 0000000000000..7308ce8947db7 --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/600_rrf_retriever_profile.yml @@ -0,0 +1,218 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: 'profile for rrf was enabled in 8.15' + test_runner_features: close_to + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + text: + type: text + integer: + type: integer + vector: + type: dense_vector + 
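# one-dimensional vectors with l2_norm similarity make the knn rank order deterministic in these tests +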
dims: 1 + index: true + similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 + + - do: + index: + index: test + id: "1" + body: + text: "term" + integer: 1 + vector: [5] + + - do: + index: + index: test + id: "2" + body: + text: "term term" + integer: 2 + vector: [4] + + - do: + index: + index: test + id: "3" + body: + text: "term term term" + integer: 3 + vector: [3] + - do: + index: + index: test + id: "4" + body: + text: "term term term term" + integer: 3 + + - do: + index: + index: test + id: "5" + body: + integer: 1 + vector: [0] + + - do: + indices.refresh: {} + +--- +"profile standard and knn query": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: "term" + } + } + } + }, + { + standard: { + query: { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5 + } + } + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - not_exists: profile.shards.0.dfs + - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: DocAndScoreQuery } + +--- +"profile standard and knn dfs retrievers": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + retriever: + rrf: + retrievers: [ + { + standard: { + query: { + term: { + text: "term" + } + } + } + }, + { + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5 + } + } + ] + rank_window_size: 5 + rank_constant: 1 + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - exists: profile.shards.0.dfs + - length: { profile.shards.0.dfs.knn: 1 } + - length: { profile.shards.0.dfs.knn.0.query: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: DocAndScoreQuery } + + - match: { profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: KnnScoreDocQuery } + +--- +"using query and dfs knn search": + + - do: + search: + index: test + body: + fields: [ "text", "integer" ] + query: { + term: { + text: { + value: "term" + } + } + } + knn: { + field: "vector", + query_vector: [ 3 ], + num_candidates: 5, + k: 5 + } + rank: { + rrf: { + rank_window_size: 5, + rank_constant: 1 + } + } + size: 3 + profile: true + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "4" } + + - exists: profile.shards.0.dfs + - length: { profile.shards.0.dfs.knn: 1 } + - length: { profile.shards.0.dfs.knn.0.query: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: DocAndScoreQuery } + + - match: { 
profile.shards.0.searches.0.query.0.type: ConstantScoreQuery } + - length: { profile.shards.0.searches.0.query.0.children: 1 } + - match: { profile.shards.0.searches.0.query.0.children.0.type: BooleanQuery } + - length: { profile.shards.0.searches.0.query.0.children.0.children: 2 } + - match: { profile.shards.0.searches.0.query.0.children.0.children.0.type: TermQuery } + - match: { profile.shards.0.searches.0.query.0.children.0.children.1.type: KnnScoreDocQuery } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/ClusterStateApplierOrderingTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/ClusterStateApplierOrderingTests.java index ffedcb8f9ebd3..ee19fc07e45cb 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/ClusterStateApplierOrderingTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/ClusterStateApplierOrderingTests.java @@ -96,7 +96,7 @@ public Settings onNodeStopped(String nodeName) { for (RoutingNode routingNode : event.state().getRoutingNodes()) { for (ShardRouting shardRouting : routingNode) { if (shardRouting.unassignedInfo() != null) { - unassignedReasons.add(shardRouting.unassignedInfo().getReason()); + unassignedReasons.add(shardRouting.unassignedInfo().reason()); } } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocator.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocator.java index ee018578ce143..b05f7e4844908 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocator.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocator.java @@ -332,8 +332,8 @@ private AllocateUnassignedDecision decideAllocation(RoutingAllocation allocation } private static boolean isDelayedDueToNodeRestart(RoutingAllocation allocation, ShardRouting shardRouting) { - if (shardRouting.unassignedInfo().isDelayed()) { - String lastAllocatedNodeId = shardRouting.unassignedInfo().getLastAllocatedNodeId(); + if (shardRouting.unassignedInfo().delayed()) { + String lastAllocatedNodeId = shardRouting.unassignedInfo().lastAllocatedNodeId(); if (lastAllocatedNodeId != null) { return allocation.metadata().nodeShutdowns().contains(lastAllocatedNodeId, SingleNodeShutdownMetadata.Type.RESTART); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 6eb49ec1ab8ae..69331fa448113 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.remotecluster; import org.apache.http.util.EntityUtils; +import org.elasticsearch.Build; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -15,13 +16,16 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.junit.RunnableTestRuleAdapter; import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -37,6 +41,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.stream.Collectors; import static org.hamcrest.Matchers.anEmptyMap; @@ -59,6 +64,7 @@ public class RemoteClusterSecurityRestIT extends AbstractRemoteClusterSecurityTe static { fulfillingCluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) .name("fulfilling-cluster") .nodes(3) .apply(commonClusterConfig) @@ -74,6 +80,7 @@ public class RemoteClusterSecurityRestIT extends AbstractRemoteClusterSecurityTe .build(); queryCluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) .name("query-cluster") .apply(commonClusterConfig) .setting("xpack.security.remote_cluster_client.ssl.enabled", () -> String.valueOf(SSL_ENABLED_REF.get())) @@ -138,6 +145,169 @@ public class RemoteClusterSecurityRestIT extends AbstractRemoteClusterSecurityTe INVALID_SECRET_LENGTH.set(randomValueOtherThan(22, () -> randomIntBetween(0, 99))); })).around(fulfillingCluster).around(queryCluster); + public void testTaskCancellation() throws Exception { + assumeTrue("[error_query] is only available in snapshot builds", Build.current().isSnapshot()); + configureRemoteCluster(); + + final String indexName = "index_fulfilling"; + final String roleName = "taskCancellationRoleName"; + final String userName = "taskCancellationUsername"; + try { + // create some index on the fulfilling cluster, to be searched from the querying cluster + { + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(Strings.format(""" + { "index": { "_index": "%s" } } + { "foo": "bar" } + """, indexName)); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + } + + // Create user and role with privileges for remote indices + var putRoleRequest = new Request("PUT", "/_security/role/" + roleName); + putRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "Role with privileges for remote index for the test of task cancellation.", + "remote_indices": [ + { + "names": ["%s"], + "privileges": ["read", "read_cross_cluster"], + "clusters": ["my_remote_cluster"] + } + ] + }""", indexName)); + assertOK(adminClient().performRequest(putRoleRequest)); + var putUserRequest = new Request("PUT", "/_security/user/" + userName); + putUserRequest.setJsonEntity(Strings.format(""" + { + "password": "%s", + "roles" : ["%s"] + }""", PASS, roleName)); + assertOK(adminClient().performRequest(putUserRequest)); + var submitAsyncSearchRequest = new Request( + "POST", + 
Strings.format( + "/%s:%s/_async_search?ccs_minimize_roundtrips=%s", + randomFrom("my_remote_cluster", "*", "my_remote_*"), + indexName, + randomBoolean() + ) + ); + + // submit a stalling remote async search + submitAsyncSearchRequest.setJsonEntity(""" + { + "query": { + "error_query": { + "indices": [ + { + "name": "*:*", + "error_type": "exception", + "stall_time_seconds": 60 + } + ] + } + } + }"""); + String asyncSearchOpaqueId = "async-search-opaque-id-" + randomUUID(); + submitAsyncSearchRequest.setOptions( + RequestOptions.DEFAULT.toBuilder() + .addHeader("Authorization", headerFromRandomAuthMethod(userName, PASS)) + .addHeader("X-Opaque-Id", asyncSearchOpaqueId) + ); + Response submitAsyncSearchResponse = client().performRequest(submitAsyncSearchRequest); + assertOK(submitAsyncSearchResponse); + Map submitAsyncSearchResponseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(submitAsyncSearchResponse.getEntity()), + false + ); + assertThat(submitAsyncSearchResponseMap.get("is_running"), equalTo(true)); + String asyncSearchId = (String) submitAsyncSearchResponseMap.get("id"); + assertThat(asyncSearchId, notNullValue()); + // wait for the tasks to show up on the querying cluster + assertBusy(() -> { + try { + Response queryingClusterTasks = adminClient().performRequest(new Request("GET", "/_tasks")); + assertOK(queryingClusterTasks); + Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(queryingClusterTasks.getEntity()), + false + ); + AtomicBoolean someTasks = new AtomicBoolean(false); + selectTasksWithOpaqueId(responseMap, asyncSearchOpaqueId, task -> { + // search tasks should not be cancelled at this point (but some transitory ones might be, + // e.g. for action "indices:admin/seq_no/global_checkpoint_sync") + if (task.get("action") instanceof String action && action.contains("indices:data/read/search")) { + assertThat(task.get("cancelled"), equalTo(false)); + someTasks.set(true); + } + }); + assertTrue(someTasks.get()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + // wait for the tasks to show up on the fulfilling cluster + assertBusy(() -> { + try { + Response fulfillingClusterTasks = performRequestAgainstFulfillingCluster(new Request("GET", "/_tasks")); + assertOK(fulfillingClusterTasks); + Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(fulfillingClusterTasks.getEntity()), + false + ); + AtomicBoolean someTasks = new AtomicBoolean(false); + selectTasksWithOpaqueId(responseMap, asyncSearchOpaqueId, task -> { + // search tasks should not be cancelled at this point (but some transitory ones might be, + // e.g. 
for action "indices:admin/seq_no/global_checkpoint_sync") + if (task.get("action") instanceof String action && action.contains("indices:data/read/search")) { + assertThat(task.get("cancelled"), equalTo(false)); + someTasks.set(true); + } + }); + assertTrue(someTasks.get()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + // delete the stalling async search + var deleteAsyncSearchRequest = new Request("DELETE", Strings.format("/_async_search/%s", asyncSearchId)); + deleteAsyncSearchRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(userName, PASS)) + ); + assertOK(client().performRequest(deleteAsyncSearchRequest)); + // ensure any remaining tasks are all cancelled on the querying cluster + { + Response queryingClusterTasks = adminClient().performRequest(new Request("GET", "/_tasks")); + assertOK(queryingClusterTasks); + Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(queryingClusterTasks.getEntity()), + false + ); + selectTasksWithOpaqueId(responseMap, asyncSearchOpaqueId, task -> assertThat(task.get("cancelled"), equalTo(true))); + } + // ensure any remaining tasks are all cancelled on the fulfilling cluster + { + Response fulfillingClusterTasks = performRequestAgainstFulfillingCluster(new Request("GET", "/_tasks")); + assertOK(fulfillingClusterTasks); + Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(fulfillingClusterTasks.getEntity()), + false + ); + selectTasksWithOpaqueId(responseMap, asyncSearchOpaqueId, task -> assertThat(task.get("cancelled"), equalTo(true))); + } + } finally { + assertOK(adminClient().performRequest(new Request("DELETE", "/_security/user/" + userName))); + assertOK(adminClient().performRequest(new Request("DELETE", "/_security/role/" + roleName))); + assertOK(performRequestAgainstFulfillingCluster(new Request("DELETE", indexName))); + } + } + public void testCrossClusterSearch() throws Exception { configureRemoteCluster(); final String crossClusterAccessApiKeyId = (String) API_KEY_MAP_REF.get().get("id"); @@ -491,4 +661,24 @@ private Response performRequestWithLocalSearchUser(final Request request) throws ); return client().performRequest(request); } + + @SuppressWarnings("unchecked") + private static void selectTasksWithOpaqueId( + Map tasksResponse, + String opaqueId, + Consumer> taskConsumer + ) { + Map> nodes = (Map>) tasksResponse.get("nodes"); + for (Map node : nodes.values()) { + Map> tasks = (Map>) node.get("tasks"); + for (Map task : tasks.values()) { + if (task.get("headers") != null) { + Map headers = (Map) task.get("headers"); + if (opaqueId.equals(headers.get("X-Opaque-Id"))) { + taskConsumer.accept(task); + } + } + } + } + } } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ae5af54f078dd..33503bc558795 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -99,6 +99,7 @@ public class Constants { "cluster:admin/features/reset", "cluster:admin/tasks/cancel", "cluster:admin/transform/delete", + 
"cluster:admin/transform/node_stats", "cluster:admin/transform/preview", "cluster:admin/transform/put", "cluster:admin/transform/reset", @@ -133,6 +134,7 @@ public class Constants { "indices:data/write/xpack/connector/update_api_key_id", "indices:data/write/xpack/connector/update_configuration", "indices:data/write/xpack/connector/update_error", + "indices:data/write/xpack/connector/update_features", "indices:data/write/xpack/connector/update_filtering", "indices:data/write/xpack/connector/update_filtering/activate", "indices:data/write/xpack/connector/update_filtering/draft_validation", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java index 8b883b01bd16f..73abfffcd3a2f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportGetSecuritySettingsAction.java @@ -47,12 +47,12 @@ public TransportGetSecuritySettingsAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - GetSecuritySettingsAction.NAME, + GetSecuritySettingsAction.INSTANCE.name(), transportService, clusterService, threadPool, actionFilters, - GetSecuritySettingsAction.Request::new, + GetSecuritySettingsAction.Request::readFrom, indexNameExpressionResolver, GetSecuritySettingsAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java index fc38bf16da8ce..20bab85a50921 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java @@ -57,12 +57,12 @@ public TransportUpdateSecuritySettingsAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - UpdateSecuritySettingsAction.NAME, + UpdateSecuritySettingsAction.INSTANCE.name(), transportService, clusterService, threadPool, actionFilters, - UpdateSecuritySettingsAction.Request::new, + UpdateSecuritySettingsAction.Request::readFrom, indexNameExpressionResolver, AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 718602b758072..0dfdf0861e321 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -533,7 +533,7 @@ private void authorizeAction( ) ); } else { - logger.warn("denying access as action [{}] is not an index or cluster action", action); + logger.warn("denying access for [{}] as action [{}] is not an index or cluster action", authentication, action); auditTrail.accessDenied(requestId, authentication, action, request, authzInfo); 
listener.onFailure(actionDenied(authentication, authzInfo, action, request)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index bf1bf7b7d3cee..42a1d89a9aa00 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteConnectionStrategy; import org.elasticsearch.transport.TransportRequest; @@ -164,6 +165,18 @@ ResolvedIndices resolveIndicesAndAliasesWithoutWildcards(String action, IndicesR final ResolvedIndices split; if (indicesRequest instanceof IndicesRequest.SingleIndexNoWildcards single && single.allowsRemoteIndices()) { split = remoteClusterResolver.splitLocalAndRemoteIndexNames(indicesRequest.indices()); + // all indices can come back empty when the remote index expression included a cluster alias with a wildcard + // and no remote clusters are configured that match it + if (split.getLocal().isEmpty() && split.getRemote().isEmpty()) { + for (String indexExpression : indices) { + String[] clusterAndIndex = indexExpression.split(":", 2); + if (clusterAndIndex.length == 2) { + if (clusterAndIndex[0].contains("*")) { + throw new NoSuchRemoteClusterException(clusterAndIndex[0]); + } + } + } + } } else { split = new ResolvedIndices(Arrays.asList(indicesRequest.indices()), List.of()); } @@ -473,5 +486,4 @@ ResolvedIndices splitLocalAndRemoteIndexNames(String... indices) { return new ResolvedIndices(local == null ? 
List.of() : local, remote); } } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/PreAuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/PreAuthorizationUtils.java index aeb6bfc8de796..221b7a65e1f8f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/PreAuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/PreAuthorizationUtils.java @@ -45,6 +45,7 @@ public final class PreAuthorizationUtils { SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME, + SearchTransportService.RANK_FEATURE_SHARD_ACTION_NAME, SearchTransportService.QUERY_CAN_MATCH_NODE_NAME ) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java index 50a4658c27ee4..ac8d84d95fd1d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/RoleDescriptorStore.java @@ -108,19 +108,6 @@ public void resolveApiKeyRoleReference( || (apiKeyRoleReference.getRoleType() == RoleReference.ApiKeyRoleType.LIMITED_BY && rolesRetrievalResult.getRoleDescriptors().stream().noneMatch(RoleDescriptor::hasRestriction)) : "there should be zero limited-by role descriptors with restriction and no more than one assigned"; - // TODO we need unit tests for edge-cases here, for instance, we need to test the REST API keys are never checked for invalid legacy - // role descriptors - if (apiKeyRoleReference.checkForInvalidLegacyRoleDescriptorsForCrossClusterAccess()) { - try { - CrossClusterApiKeyRoleDescriptorBuilder.checkForInvalidLegacyRoleDescriptors( - apiKeyRoleReference.getApiKeyId(), - roleDescriptors - ); - } catch (IllegalArgumentException e) { - listener.onFailure(e); - return; - } - } listener.onResponse(rolesRetrievalResult); } @@ -182,6 +169,32 @@ public void resolveCrossClusterAccessRoleReference( listener.onResponse(rolesRetrievalResult); } + @Override + public void resolveCrossClusterApiKeyRoleReference( + RoleReference.CrossClusterApiKeyRoleReference crossClusterApiKeyRoleReference, + ActionListener listener + ) { + final List roleDescriptors = apiKeyService.parseRoleDescriptorsBytes( + crossClusterApiKeyRoleReference.getApiKeyId(), + crossClusterApiKeyRoleReference.getRoleDescriptorsBytes(), + crossClusterApiKeyRoleReference.getRoleType() + ); + final RolesRetrievalResult rolesRetrievalResult = new RolesRetrievalResult(); + rolesRetrievalResult.addDescriptors(Set.copyOf(roleDescriptors)); + assert rolesRetrievalResult.getRoleDescriptors().stream().noneMatch(RoleDescriptor::hasRestriction) + : "there should be no role descriptors with restriction"; + try { + CrossClusterApiKeyRoleDescriptorBuilder.checkForInvalidLegacyRoleDescriptors( + crossClusterApiKeyRoleReference.getApiKeyId(), + roleDescriptors + ); + } catch (IllegalArgumentException e) { + listener.onFailure(e); + return; + } + listener.onResponse(rolesRetrievalResult); + } + private void resolveRoleNames(Set roleNames, ActionListener listener) { roleDescriptors(roleNames, ActionListener.wrap(rolesRetrievalResult -> { logDeprecatedRoles(rolesRetrievalResult.getRoleDescriptors()); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java index 033f692d7b1e2..0b4ced0a20444 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestGetSecuritySettingsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.security.action.settings.GetSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -36,7 +37,7 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - GetSecuritySettingsAction.Request req = new GetSecuritySettingsAction.Request(); + final var req = new GetSecuritySettingsAction.Request(RestUtils.getMasterNodeTimeout(request)); return restChannel -> client.execute(GetSecuritySettingsAction.INSTANCE, req, new RestToXContentListener<>(restChannel)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java index b2e8719b25c24..27ed6d2475d2c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -36,9 +37,18 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - UpdateSecuritySettingsAction.Request req; + final UpdateSecuritySettingsAction.Request req; try (var parser = request.contentParser()) { - req = UpdateSecuritySettingsAction.Request.parse(parser); + req = UpdateSecuritySettingsAction.Request.parse( + parser, + (mainIndexSettings, tokensIndexSettings, profilesIndexSettings) -> new UpdateSecuritySettingsAction.Request( + RestUtils.getMasterNodeTimeout(request), + RestUtils.getAckTimeout(request), + mainIndexSettings, + tokensIndexSettings, + profilesIndexSettings + ) + ); } return restChannel -> client.execute(UpdateSecuritySettingsAction.INSTANCE, req, new RestToXContentListener<>(restChannel)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index d8914d4646fd3..268f9e6375f0e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -23,6 +23,7 @@ import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteConnectionManager; import org.elasticsearch.transport.RemoteConnectionManager.RemoteClusterAliasWithCredentials; @@ -81,7 +82,11 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor "internal:data/read/esql/open_exchange", "cluster:internal:data/read/esql/open_exchange", "internal:data/read/esql/exchange", - "cluster:internal:data/read/esql/exchange" + "cluster:internal:data/read/esql/exchange", + TaskCancellationService.BAN_PARENT_ACTION_NAME, + TaskCancellationService.REMOTE_CLUSTER_BAN_PARENT_ACTION_NAME, + TaskCancellationService.CANCEL_CHILD_ACTION_NAME, + TaskCancellationService.REMOTE_CLUSTER_CANCEL_CHILD_ACTION_NAME ); private final AuthenticationService authcService; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index be0516ab180c9..82ac95a21086d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; @@ -104,6 +105,7 @@ import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.RESTRICTED_INDICES; import static org.elasticsearch.xpack.security.authz.AuthorizedIndicesTests.getRequestInfo; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.arrayContaining; @@ -455,6 +457,102 @@ public void testAllIsNotAllowedInShardLevelRequests() { ); } + public void testResolveIndicesAndAliasesWithoutWildcardsWithSingleIndexNoWildcardsRequest() { + // test 1: matching local index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "index10" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(0)); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("index10")); + } + + // test 2: matching 
remote index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + + // test 3: missing local index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "zzz_no_such_index_zzz" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(0)); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("zzz_no_such_index_zzz")); + } + + // test 4: missing remote index + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "remote:zzz_no_such_index_zzz" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:zzz_no_such_index_zzz")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + + // test 5: both local and remote indices + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "index10", "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(1)); + assertThat(resolvedIndices.getLocal().get(0), equalTo("index10")); + } + + // test 6: remote cluster name with wildcards that does not match any configured remotes + { + NoSuchRemoteClusterException exception = expectThrows( + NoSuchRemoteClusterException.class, + () -> defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "x*x:test" }) + ) + ); + assertThat(exception.getMessage(), containsString("no such remote cluster: [x*x]")); + } + + // test 7: mix of test 2 and test 6 - should not result in an exception (a wildcard without matches has no effect) + { + ResolvedIndices resolvedIndices = defaultIndicesResolver.resolveIndicesAndAliasesWithoutWildcards( + TransportSearchAction.TYPE.name() + "[s]", + createSingleIndexNoWildcardsRequest(new String[] { "x*x:test", "remote:indexName" }) + ); + assertThat(resolvedIndices.getRemote().size(), equalTo(1)); + assertThat(resolvedIndices.getRemote().get(0), equalTo("remote:indexName")); + assertThat(resolvedIndices.getLocal().size(), equalTo(0)); + } + } + + private static IndicesRequest.SingleIndexNoWildcards createSingleIndexNoWildcardsRequest(String[] indexExpression) { + IndicesRequest.SingleIndexNoWildcards singleIndexNoWildcardsRequest = new IndicesRequest.SingleIndexNoWildcards() { + @Override + public String[] indices() { + return indexExpression; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.DEFAULT; + } + }; + return singleIndexNoWildcardsRequest; + } + public void testExplicitDashIndices() { SearchRequest request = new
SearchRequest("-index10", "-index20"); List indices = resolveIndices(request, buildAuthorizedIndices(userDashIndices, TransportSearchAction.TYPE.name())) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java index 9e8c54ba594ea..69043c606ef15 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java @@ -219,7 +219,7 @@ static ShutdownShardMigrationStatus shardMigrationStatus( .unassigned() .stream() .peek(s -> cancellableTask.ensureNotCancelled()) - .filter(s -> Objects.equals(s.unassignedInfo().getLastAllocatedNodeId(), nodeId)) + .filter(s -> Objects.equals(s.unassignedInfo().lastAllocatedNodeId(), nodeId)) .filter(s -> s.primary() || hasShardCopyOnAnotherNode(currentState, s, shuttingDownNodes) == false) .toList(); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 1657a3bf7fbce..2a2a6ad164677 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -180,6 +180,11 @@ public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName(), builder.ignoreMalformed.getDefaultValue().value()).init(this); } + @Override + protected SyntheticSourceMode syntheticSourceMode() { + return SyntheticSourceMode.FALLBACK; + } + public static class PointFieldType extends AbstractPointFieldType implements ShapeQueryable { private final ShapeQueryPointProcessor queryProcessor; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 83e434f829591..4d6a909f96c3f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -249,6 +249,11 @@ public ShapeFieldType fieldType() { return (ShapeFieldType) super.fieldType(); } + @Override + protected SyntheticSourceMode syntheticSourceMode() { + return SyntheticSourceMode.FALLBACK; + } + public static class CartesianShapeDocValuesField extends AbstractScriptFieldFactory implements Field, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java index b17aa1b175f2b..a2560bb38c6ce 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.ExtensiblePlugin; import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; @@ -20,6 +21,7 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.containsString; @@ -32,7 +34,15 @@ public abstract class CartesianFieldMapperTests extends MapperTestCase { @Override protected Collection getPlugins() { - return Collections.singletonList(new LocalStateSpatialPlugin()); + var plugin = new LocalStateSpatialPlugin(); + plugin.loadExtensions(new ExtensiblePlugin.ExtensionLoader() { + @Override + public List loadExtensions(Class extensionPointType) { + return List.of(); + } + }); + + return Collections.singletonList(plugin); } @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 507fe80291d89..55fcf1b7d39b4 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -9,10 +9,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.GeoJson; -import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.common.geo.Orientation; -import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; @@ -30,17 +27,14 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -434,144 +428,7 @@ protected Object generateRandomInputValue(MappedFieldType ft) { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - // Almost like GeoShapeType but no circles - enum ShapeType { - POINT, - LINESTRING, - POLYGON, - MULTIPOINT, - MULTILINESTRING, - MULTIPOLYGON, - GEOMETRYCOLLECTION, - ENVELOPE - } - - return new SyntheticSourceSupport() { - @Override - public boolean preservesExactSource() { - return true; - } - - @Override - public SyntheticSourceExample example(int maxValues) throws IOException { - if (randomBoolean()) { - Value v = generateValue(); - if (v.blockLoaderOutput != null) { - return new SyntheticSourceExample(v.input, v.output, v.blockLoaderOutput, this::mapping); - } - return new SyntheticSourceExample(v.input, v.output, this::mapping); - } - - List values = randomList(1, maxValues, this::generateValue); - List in = values.stream().map(Value::input).toList(); - List out = 
values.stream().map(Value::output).toList(); - - // Block loader infrastructure will never return nulls - List outBlockList = values.stream() - .filter(v -> v.input != null) - .map(v -> v.blockLoaderOutput != null ? v.blockLoaderOutput : v.output) - .toList(); - var outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; - - return new SyntheticSourceExample(in, out, outBlock, this::mapping); - } - - private record Value(Object input, Object output, String blockLoaderOutput) { - Value(Object input, Object output) { - this(input, output, null); - } - } - - private Value generateValue() { - if (ignoreMalformed && randomBoolean()) { - List> choices = List.of( - () -> randomAlphaOfLength(3), - ESTestCase::randomInt, - ESTestCase::randomLong, - ESTestCase::randomFloat, - ESTestCase::randomDouble - ); - Object v = randomFrom(choices).get(); - return new Value(v, v); - } - if (randomBoolean()) { - return new Value(null, null); - } - - var type = randomFrom(ShapeType.values()); - var isGeoJson = randomBoolean(); - - switch (type) { - case POINT -> { - var point = GeometryTestUtils.randomPoint(false); - return value(point, isGeoJson); - } - case LINESTRING -> { - var line = GeometryTestUtils.randomLine(false); - return value(line, isGeoJson); - } - case POLYGON -> { - var polygon = GeometryTestUtils.randomPolygon(false); - return value(polygon, isGeoJson); - } - case MULTIPOINT -> { - var multiPoint = GeometryTestUtils.randomMultiPoint(false); - return value(multiPoint, isGeoJson); - } - case MULTILINESTRING -> { - var multiPoint = GeometryTestUtils.randomMultiLine(false); - return value(multiPoint, isGeoJson); - } - case MULTIPOLYGON -> { - var multiPolygon = GeometryTestUtils.randomMultiPolygon(false); - return value(multiPolygon, isGeoJson); - } - case GEOMETRYCOLLECTION -> { - var multiPolygon = GeometryTestUtils.randomGeometryCollectionWithoutCircle(false); - return value(multiPolygon, isGeoJson); - } - case ENVELOPE -> { - var rectangle = GeometryTestUtils.randomRectangle(); - var wktString = WellKnownText.toWKT(rectangle); - - return new Value(wktString, wktString); - } - default -> throw new UnsupportedOperationException("Unsupported shape"); - } - } - - private static Value value(Geometry geometry, boolean isGeoJson) { - var wktString = WellKnownText.toWKT(geometry); - var normalizedWktString = GeometryNormalizer.needsNormalize(Orientation.RIGHT, geometry) - ? 
WellKnownText.toWKT(GeometryNormalizer.apply(Orientation.RIGHT, geometry)) - : wktString; - - if (isGeoJson) { - var map = GeoJson.toMap(geometry); - return new Value(map, map, normalizedWktString); - } - - return new Value(wktString, wktString, normalizedWktString); - } - - private void mapping(XContentBuilder b) throws IOException { - b.field("type", "geo_shape"); - if (rarely()) { - b.field("index", false); - } - if (rarely()) { - b.field("doc_values", false); - } - if (ignoreMalformed) { - b.field("ignore_malformed", true); - } - } - - @Override - public List invalidExample() throws IOException { - return List.of(); - } - }; + return new GeometricShapeSyntheticSourceSupport(ignoreMalformed); } @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeometricShapeSyntheticSourceSupport.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeometricShapeSyntheticSourceSupport.java new file mode 100644 index 0000000000000..4325eb41ceefa --- /dev/null +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeometricShapeSyntheticSourceSupport.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.spatial.index.mapper; + +import org.elasticsearch.common.geo.GeoJson; +import org.elasticsearch.common.geo.GeometryNormalizer; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.geometry.utils.WellKnownText; +import org.elasticsearch.index.mapper.MapperTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.function.Supplier; + +import static org.apache.lucene.tests.util.LuceneTestCase.rarely; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomList; + +/** + * Synthetic source support for fields that index geometry shapes: shape, geo_shape.
+ */ +public class GeometricShapeSyntheticSourceSupport implements MapperTestCase.SyntheticSourceSupport { + private final boolean ignoreMalformed; + + public GeometricShapeSyntheticSourceSupport(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + } + + @Override + public boolean preservesExactSource() { + return true; + } + + @Override + public MapperTestCase.SyntheticSourceExample example(int maxValues) throws IOException { + if (randomBoolean()) { + Value v = generateValue(); + if (v.blockLoaderOutput != null) { + return new MapperTestCase.SyntheticSourceExample(v.input, v.output, v.blockLoaderOutput, this::mapping); + } + return new MapperTestCase.SyntheticSourceExample(v.input, v.output, this::mapping); + } + + List values = randomList(1, maxValues, this::generateValue); + List in = values.stream().map(Value::input).toList(); + List out = values.stream().map(Value::output).toList(); + + // Block loader infrastructure will never return nulls + List outBlockList = values.stream() + .filter(v -> v.input != null) + .map(v -> v.blockLoaderOutput != null ? v.blockLoaderOutput : v.output) + .toList(); + var outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + + return new MapperTestCase.SyntheticSourceExample(in, out, outBlock, this::mapping); + } + + private record Value(Object input, Object output, String blockLoaderOutput) { + Value(Object input, Object output) { + this(input, output, null); + } + } + + private Value generateValue() { + if (ignoreMalformed && randomBoolean()) { + List> choices = List.of( + () -> randomAlphaOfLength(3), + ESTestCase::randomInt, + ESTestCase::randomLong, + ESTestCase::randomFloat, + ESTestCase::randomDouble + ); + Object v = randomFrom(choices).get(); + return new Value(v, v); + } + if (randomBoolean()) { + return new Value(null, null); + } + + var type = randomFrom(ShapeType.values()); + var isGeoJson = randomBoolean(); + + return switch (type) { + // LINEARRING and CIRCLE are not supported as inputs to fields so just return points + case POINT, LINEARRING, CIRCLE -> { + var point = GeometryTestUtils.randomPoint(false); + yield value(point, isGeoJson); + } + case MULTIPOINT -> { + var multiPoint = GeometryTestUtils.randomMultiPoint(false); + yield value(multiPoint, isGeoJson); + } + case LINESTRING -> { + var line = GeometryTestUtils.randomLine(false); + yield value(line, isGeoJson); + } + case MULTILINESTRING -> { + var multiPoint = GeometryTestUtils.randomMultiLine(false); + yield value(multiPoint, isGeoJson); + } + case POLYGON -> { + var polygon = GeometryTestUtils.randomPolygon(false); + yield value(polygon, isGeoJson); + } + case MULTIPOLYGON -> { + var multiPolygon = GeometryTestUtils.randomMultiPolygon(false); + yield value(multiPolygon, isGeoJson); + } + case GEOMETRYCOLLECTION -> { + var multiPolygon = GeometryTestUtils.randomGeometryCollectionWithoutCircle(false); + yield value(multiPolygon, isGeoJson); + } + case ENVELOPE -> { + var rectangle = GeometryTestUtils.randomRectangle(); + var wktString = WellKnownText.toWKT(rectangle); + + yield new Value(wktString, wktString); + } + }; + } + + private static Value value(Geometry geometry, boolean isGeoJson) { + var wktString = WellKnownText.toWKT(geometry); + var normalizedWktString = GeometryNormalizer.needsNormalize(Orientation.RIGHT, geometry) + ? 
WellKnownText.toWKT(GeometryNormalizer.apply(Orientation.RIGHT, geometry)) + : wktString; + + if (isGeoJson) { + var map = GeoJson.toMap(geometry); + return new Value(map, map, normalizedWktString); + } + + return new Value(wktString, wktString, normalizedWktString); + } + + private void mapping(XContentBuilder b) throws IOException { + b.field("type", "geo_shape"); + if (rarely()) { + b.field("index", false); + } + if (rarely()) { + b.field("doc_values", false); + } + if (ignoreMalformed) { + b.field("ignore_malformed", true); + } + } + + @Override + public List invalidExample() throws IOException { + return List.of(); + } +} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java index b2b250c6d81bd..ad622109e1748 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java @@ -6,9 +6,17 @@ */ package org.elasticsearch.xpack.spatial.index.mapper; +import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.document.XYPointField; +import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MappedFieldType; @@ -22,7 +30,11 @@ import org.junit.AssumptionViolatedException; import java.io.IOException; +import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; import static org.elasticsearch.geometry.utils.Geohash.stringEncode; import static org.hamcrest.Matchers.containsString; @@ -419,7 +431,132 @@ protected Object generateRandomInputValue(MappedFieldType ft) { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - throw new AssumptionViolatedException("not supported"); + return syntheticSourceSupport(ignoreMalformed, false); + } + + @Override + protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed, boolean columnReader) { + return new SyntheticSourceSupport() { + private final boolean ignoreZValue = usually(); + private final CartesianPoint nullValue = usually() ? 
null : randomCartesianPoint(); + + @Override + public boolean preservesExactSource() { + return true; + } + + @Override + public SyntheticSourceExample example(int maxVals) { + if (randomBoolean()) { + Value v = generateValue(); + + if (v.point == null) { + return new SyntheticSourceExample(v.representation(), v.representation(), null, this::mapping); + } else if (columnReader) { + return new SyntheticSourceExample(v.representation(), v.representation(), encode(v.point()), this::mapping); + } + return new SyntheticSourceExample(v.representation(), v.representation(), v.point().toWKT(), this::mapping); + } + List values = randomList(1, maxVals, this::generateValue); + var representations = values.stream().map(Value::representation).toList(); + + if (columnReader) { + // When reading doc-values, the block is a list of encoded longs + List outBlockList = values.stream() + .map(Value::point) + .filter(Objects::nonNull) + .map(this::encode) + .sorted() + .toList(); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(representations, representations, outBlock, this::mapping); + } else { + // When reading row-stride, the block is a list of WKT encoded BytesRefs + List outBlockList = values.stream() + .map(Value::point) + .filter(Objects::nonNull) + .map(CartesianPoint::toWKT) + .toList(); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(representations, representations, outBlock, this::mapping); + } + } + + private record Value(CartesianPoint point, Object representation) {} + + private Value generateValue() { + if (nullValue != null && randomBoolean()) { + return new Value(nullValue, null); + } + + if (ignoreMalformed) { + // #exampleMalformedValues() covers a lot of cases + + // nice complex object + return new Value(null, Map.of("one", 1, "two", List.of(2, 22, 222), "three", Map.of("three", 33))); + } + + CartesianPoint point = randomCartesianPoint(); + return new Value(point, randomInputFormat(point)); + } + + private CartesianPoint randomCartesianPoint() { + Point point = GeometryTestUtils.randomPoint(false); + return decode(encode(new CartesianPoint(point.getLat(), point.getLon()))); + } + + private Object randomInputFormat(CartesianPoint point) { + return switch (randomInt(4)) { + case 0 -> Map.of("x", point.getX(), "y", point.getY()); + case 1 -> new double[] { point.getX(), point.getY() }; + case 2 -> "POINT( " + point.getX() + " " + point.getY() + " )"; + case 3 -> point.toString(); + default -> { + List coords = new ArrayList<>(); + coords.add(point.getX()); + coords.add(point.getY()); + if (ignoreZValue) { + coords.add(randomDouble()); + } + yield Map.of("coordinates", coords, "type", "point"); + } + }; + } + + private long encode(CartesianPoint point) { + return new XYDocValuesField("f", (float) point.getX(), (float) point.getY()).numericValue().longValue(); + } + + private CartesianPoint decode(long point) { + double lat = GeoEncodingUtils.decodeLatitude((int) (point >> 32)); + double lon = GeoEncodingUtils.decodeLongitude((int) (point & 0xFFFFFFFF)); + return new CartesianPoint(lat, lon); + } + + private void mapping(XContentBuilder b) throws IOException { + b.field("type", "point"); + if (ignoreZValue == false || rarely()) { + b.field("ignore_z_value", ignoreZValue); + } + if (nullValue != null) { + b.field("null_value", randomInputFormat(nullValue)); + } + if (rarely()) { + b.field("index", false); + } + if (rarely()) { + b.field("store", 
false); + } + if (ignoreMalformed) { + b.field("ignore_malformed", true); + } + } + + @Override + public List invalidExample() throws IOException { + return List.of(); + } + }; } @Override @@ -427,11 +564,36 @@ protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } + @Override + protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + if (columnReader) { + // When using column reader, we expect the output to be doc-values (which means encoded longs) + return v -> asJacksonNumberOutput(((Number) v).longValue()); + } else { + // When using row-stride reader, we expect the output to be WKT encoded BytesRef + return v -> asWKT((BytesRef) v); + } + } + + protected static Object asJacksonNumberOutput(long l) { + // Cast to int to mimic jackson-core behaviour in NumberOutput.outputLong() + // that is called when deserializing expected value in SyntheticSourceExample. + if (l < 0 && l >= Integer.MIN_VALUE || l >= 0 && l <= Integer.MAX_VALUE) { + return (int) l; + } else { + return l; + } + } + + protected static Object asWKT(BytesRef value) { + // Internally we use WKB in BytesRef, but for test assertions we want to use WKT for readability + Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, value.bytes); + return WellKnownText.toWKT(geometry); + } + @Override protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) { - // TODO: Support testing both reading from source as well as reading from doc-values MappedFieldType ft = mapper.fieldType(loaderFieldName); - PointFieldMapper.PointFieldType point = (PointFieldMapper.PointFieldType) ft; - return new BlockReaderSupport(point.isIndexed() == false && ft.hasDocValues(), false, mapper, loaderFieldName); + return new BlockReaderSupport(ft.hasDocValues(), false, mapper, loaderFieldName); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 26d349a7ee5a6..28297f32297e6 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -8,8 +8,13 @@ import org.apache.lucene.document.ShapeField; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper; @@ -29,6 +34,7 @@ import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.function.Function; import static org.elasticsearch.geometry.utils.Geohash.stringEncode; import static org.hamcrest.Matchers.containsString; @@ -362,7 +368,24 @@ protected Object generateRandomInputValue(MappedFieldType ft) { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - throw new AssumptionViolatedException("not supported"); + return new 
GeometricShapeSyntheticSourceSupport(ignoreMalformed); + } + + @Override + protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + return v -> asWKT((BytesRef) v); + } + + protected static Object asWKT(BytesRef value) { + // Internally we use WKB in BytesRef, but for test assertions we want to use WKT for readability + Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, value.bytes); + return WellKnownText.toWKT(geometry); + } + + @Override + protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) { + // Synthetic source is currently not supported. + return new BlockReaderSupport(false, false, mapper, loaderFieldName); } @Override diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml index 52d390e7b288b..ab0261d916630 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml @@ -333,3 +333,15 @@ setup: - match: {values.0: ["1",2.0,null,true,123,123]} - match: {values.1: ["1",2.0,null,true,123,123]} - match: {values.2: ["1",2.0,null,true,123,123]} + +--- +version is not allowed: + - requires: + cluster_features: ["gte_v8.14.0"] + reason: version allowed in 8.13.latest + - do: + catch: /unknown field \[version\]/ + esql.query: + body: + query: 'from test' + version: cat diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/150_lookup.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/150_lookup.yml index e8b372e6d7e8e..dcffca2f28fb0 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/150_lookup.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/150_lookup.yml @@ -1,32 +1,210 @@ -index named lookup: +--- +setup: + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 5 + mappings: + properties: + data: + type: long + data_d: + type: double + count: + type: long + count_d: + type: double + time: + type: long + color: + type: keyword - do: bulk: - index: lookup + index: "test" refresh: true body: - - { index: { } } - - { f: 1 } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275187, "color": "red" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275188, "color": "blue" } + +--- +basic: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" - do: esql.query: body: - query: 'FROM lookup | LIMIT 1' - - match: { columns.0.name: f } - - match: { columns.0.type: long } - - length: { values: 1 } - - match: { values.0.0: 1 } + query: 'FROM test | SORT time | KEEP color | LOOKUP colors ON color | LIMIT 2' + columnar: true + tables: + colors: + "color:keyword": ["red", "green", "blue"] + "rgb:integer": [16711680, 65280, 255] + + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "rgb"} + - match: {columns.1.type: "integer"} + - match: {values.0: ["red", "blue"]} + - match: {values.1: [16711680, 255]} --- -lookup command unsupported: +read multivalue keyword: - requires: test_runner_features: [capabilities] capabilities: - method: POST path: /_query - parameters: [method, path, parameters, 
capabilities] - capabilities: [lookup] - reason: "LOOKUP command required" + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + - do: + esql.query: + body: + query: 'FROM test | SORT time | KEEP color | LOOKUP color_associations ON color | LIMIT 2' + columnar: true + tables: + color_associations: + "color:keyword": ["red", "green", "blue"] + "association:keyword": + - ["love", "passion", "blood", "happiness"] + - ["nature", "healing", "health", "youth"] + - ["serenity", "wisdom", "ocean", "sky"] + + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "association"} + - match: {columns.1.type: "keyword"} + - match: {values.0: ["red", "blue"]} + - match: {values.1: [["love", "passion", "blood", "happiness"], ["serenity", "wisdom", "ocean", "sky"]]} + +--- +keyword matches text: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + + - do: + indices.create: + index: test_text + body: + settings: + number_of_shards: 1 + mappings: + properties: + color: + type: text + - do: + bulk: + index: test_text + refresh: true + body: + - { "index": { } } + - { "color": "red" } + + - do: + esql.query: + body: + query: 'FROM test_text | LOOKUP colors ON color | LIMIT 1' + columnar: true + tables: + colors: + "color:keyword": ["red", "green", "blue"] + "rgb:integer": [16711680, 65280, 255] + + - match: {columns.0.name: "color"} + - match: {columns.0.type: "text"} + - match: {columns.1.name: "rgb"} + - match: {columns.1.type: "integer"} + - match: {values.0: ["red"]} + - match: {values.1: [16711680]} + +--- +duplicate column names in table: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + + - do: + catch: /duplicate column name \[color\]/ + esql.query: + body: + query: 'FROM test | LOOKUP colors ON color | SORT time | KEEP color, rgb | LIMIT 2' + columnar: true + tables: + colors: + "color:keyword": ["red", "green", "blue"] + "color:integer": [16711680, 65280, 255] + +--- +duplicate keys: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + + - do: + # TODO improve this error message + catch: /found a duplicate row/ + esql.query: + body: + query: 'FROM test | LOOKUP colors ON color | SORT time | KEEP color, rgb | LIMIT 2' + columnar: true + tables: + colors: + "color:keyword": ["red", "red", "blue"] + "rgb:integer": [16711680, 65280, 255] + +--- +multivalued keys: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + + - do: + # TODO improve this error message + catch: /only single valued keys are supported/ + esql.query: + body: + query: 'FROM test | LOOKUP colors ON color | SORT time | KEEP color, rgb | LIMIT 2' + columnar: true + tables: + colors: + "color:keyword": [["red", "blue"], "white", "blue"] + "rgb:integer": [16711680, 65280, 255] + +--- +index named lookup still works: - do: bulk: index: lookup @@ -36,7 +214,32 @@ lookup command unsupported: - { f: 1 } - do: - catch: /LOOKUP not yet supported/ esql.query: body: - query: 'FROM lookup | LOOKUP a ON foo' + query: 'FROM lookup | LIMIT 1' + - match: { columns.0.name: f } + - match: { 
columns.0.type: long } + - length: { values: 1 } + - match: { values.0.0: 1 } + +--- +on function: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [lookup_command] + reason: "uses LOOKUP" + + - do: + catch: "/token recognition error at: '\\('/" + esql.query: + body: + query: 'FROM test | SORT time | KEEP color | LOOKUP colors ON CONCAT(color, "foo") | LIMIT 2' + columnar: true + tables: + colors: + "color:keyword": ["red", "green", "blue"] + "rgb:integer": [16711680, 65280, 255] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml index 569c310c3b244..ccc6cd8627b53 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml @@ -1,5 +1,9 @@ --- "geo_shape": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + - do: indices.create: index: test @@ -62,6 +66,10 @@ --- "geo_shape with ignore_malformed": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + - do: indices.create: index: test @@ -139,8 +147,161 @@ - match: { _source.shape: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] } +--- +"shape": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + shape: + type: shape + + - do: + index: + index: test + id: "1" + body: + shape: + type: "Polygon" + coordinates: [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]] + + - do: + index: + index: test + id: "2" + body: + shape: "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" + + - do: + index: + index: test + id: "3" + body: + shape: ["POINT (-77.03653 38.897676)", {"type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]}] + + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.shape.type: "Polygon" } + - match: { _source.shape.coordinates: [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]] } + + - do: + get: + index: test + id: "2" + + - match: { _source.shape: "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" } + + - do: + get: + index: test + id: "3" + + - match: { _source.shape: ["POINT (-77.03653 38.897676)", {"type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]}] } + +--- +"shape with ignore_malformed": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + shape: + type: shape + ignore_malformed: true + + - do: + index: + index: test + id: "1" + body: + shape: 500 + + - do: + index: + index: test + id: "2" + body: + shape: + string: "string" + array: [{ "a": 1 }, { "b": 2 }] + object: { "foo": 
"bar" } + + - do: + index: + index: test + id: "3" + body: + shape: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] + + - do: + index: + index: test + id: "4" + body: + shape: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] + + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.shape: 500 } + + - do: + get: + index: test + id: "2" + + - match: { _source.shape.string: "string" } + - match: { _source.shape.array: [{ "a": 1 }, { "b": 2 }] } + - match: { _source.shape.object: { "foo": "bar" } } + + - do: + get: + index: test + id: "3" + + - match: { _source.shape: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] } + + - do: + get: + index: test + id: "4" + + - match: { _source.shape: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] } + --- "geo_point": + - requires: + cluster_features: ["gte_v8.3.0"] + reason: introduced in 8.3.0 + - do: indices.create: index: test @@ -249,3 +410,167 @@ - match: { _source.point.lon: -71.34000029414892 } - match: { _source.point.lat: 41.119999922811985 } + +--- +"point": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + point: + type: point + + - do: + index: + index: test + id: "1" + body: + point: + type: "Point" + coordinates: [-71.34, 41.12] + + - do: + index: + index: test + id: "2" + body: + point: "POINT (-71.34 41.12)" + + - do: + index: + index: test + id: "3" + body: + point: + x: -71.34 + y: 41.12 + + - do: + index: + index: test + id: "4" + body: + point: [ -71.34, 41.12 ] + + - do: + index: + index: test + id: "5" + body: + point: "41.12,-71.34" + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.point.type: "Point" } + - match: { _source.point.coordinates: [-71.34, 41.12] } + + - do: + get: + index: test + id: "2" + + - match: { _source.point: "POINT (-71.34 41.12)" } + + - do: + get: + index: test + id: "3" + + - match: { _source.point.x: -71.34 } + - match: { _source.point.y: 41.12 } + + - do: + get: + index: test + id: "4" + + - match: { _source.point: [ -71.34, 41.12 ] } + + - do: + get: + index: test + id: "5" + + - match: { _source.point: "41.12,-71.34" } + +--- +"point with ignore_malformed": + - requires: + cluster_features: ["mapper.source.synthetic_source_fallback"] + reason: introduced in 8.15.0 + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + point: + type: point + ignore_malformed: true + + - do: + index: + index: test + id: "1" + body: + point: + string: "string" + array: [{ "a": 1 }, { "b": 2 }] + object: { "foo": "bar" } + + - do: + index: + index: test + id: "2" + body: + point: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] + + - do: + index: + index: test + id: "3" + body: + point: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.point.string: "string" } + - match: { _source.point.array: [{ "a": 1 }, { "b": 2 }] } + - match: { _source.point.object: { "foo": "bar" } } + + - do: + get: + index: test + id: "2" + + - match: { _source.point: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] } + + - do: + get: + index: test + id: "3" + + - match: { _source.point: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] } diff --git 
a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java index 98cf817d6c018..97d38807f5c17 100644 --- a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -77,6 +77,15 @@ protected List getTransformTasksFromClusterState(String transformId) thr return tasks.stream().map(t -> (String) t.get("id")).filter(transformId::equals).toList(); } + protected Response getNodeStats() throws IOException { + return adminClient().performRequest(new Request("GET", "/_transform/_node_stats")); + } + + protected int getTotalRegisteredTransformCount() throws IOException { + Response response = getNodeStats(); + return (int) XContentMapValues.extractValue(entityAsMap(response), "total", "scheduler", "registered_transform_count"); + } + @SuppressWarnings("unchecked") protected void logAudits() throws Exception { logger.info("writing audit messages to the log"); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 4db0d0d8baaf1..ab478dc16f224 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -245,16 +245,19 @@ public void testTransformLifecycleInALoop() throws Exception { putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(0))); startTransform(transformId, RequestOptions.DEFAULT); // There is 1 transform task after start. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(1))); Thread.sleep(sleepAfterStartMillis); // There should still be 1 transform task as the transform is continuous. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(1))); // Stop the transform with force set randomly. stopTransform(transformId, true, null, false, force); @@ -268,6 +271,7 @@ public void testTransformLifecycleInALoop() throws Exception { } // After the transform is stopped, there should be no transform task left in the cluster state. 
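At that point the scheduler's registered-transform count should also be back to zero, which is what the assertion added just below checks via the `getNodeStats()` / `getTotalRegisteredTransformCount()` helpers introduced in `TransformCommonRestTestCase` above. As a minimal sketch of how those helpers pull the counter out of the nested `_node_stats` body with `XContentMapValues.extractValue` (the response map here is an assumed, reduced shape with illustrative values, not the real response class):

```java
import org.elasticsearch.common.xcontent.support.XContentMapValues;

import java.util.Map;

public class NodeStatsExtractionSketch {
    public static void main(String[] args) {
        // Assumed, reduced shape of a GET /_transform/_node_stats body:
        // a "total" section plus one section per node id ("node-0" is made up).
        Map<String, Object> response = Map.of(
            "total", Map.of("scheduler", Map.of("registered_transform_count", 1)),
            "node-0", Map.of("scheduler", Map.of("registered_transform_count", 1))
        );

        // extractValue descends one map level per path element and returns the
        // leaf as Object, so the caller casts it, exactly as the test helper does.
        int total = (int) XContentMapValues.extractValue(response, "total", "scheduler", "registered_transform_count");
        System.out.println(total); // prints 1
    }
}
```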
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + assertThat("Node stats were: " + entityAsMap(getNodeStats()), getTotalRegisteredTransformCount(), is(equalTo(0))); // Delete the transform deleteTransform(transformId); @@ -586,6 +590,7 @@ public void testContinuousTransformRethrottle() throws Exception { deleteTransform(config.getId()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109101") public void testStartTransform_GivenTimeout_Returns408() throws Exception { String indexName = "start-transform-timeout-index"; String transformId = "start-transform-timeout"; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java new file mode 100644 index 0000000000000..04483873a2aff --- /dev/null +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformNodeStatsIT.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.integration; + +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; +import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource; +import org.junit.After; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; + +public class TransformNodeStatsIT extends TransformRestTestCase { + + private static final int NUM_USERS = 28; + + static Integer getUserIdForRow(int row) { + return row % NUM_USERS; + } + + static String getDateStringForRow(int row) { + int day = (11 + (row / 100)) % 28; + int hour = 10 + (row % 13); + int min = 10 + (row % 49); + int sec = 10 + (row % 49); + return "2017-01-" + (day < 10 ? 
"0" + day : day) + "T" + hour + ":" + min + ":" + sec + "Z"; + } + + @After + public void cleanTransforms() throws Exception { + cleanUp(); + } + + @SuppressWarnings("unchecked") + public void testTransformNodeStats() throws Exception { + var transformId = "transform-node-stats"; + createTransform("basic-stats-reviews", transformId); + + var nodesInfo = getNodesInfo(adminClient()); + assertThat("Nodes were: " + nodesInfo, nodesInfo.size(), is(equalTo(3))); + + var response = entityAsMap(getNodeStats()); + assertThat(response, hasKey("total")); + assertThat( + "Response was: " + response, + (int) XContentMapValues.extractValue(response, "total", "scheduler", "registered_transform_count"), + is(equalTo(1)) + ); + for (String nodeId : nodesInfo.keySet()) { + assertThat(response, hasKey(nodeId)); + assertThat( + "Response was: " + response, + (int) XContentMapValues.extractValue(response, nodeId, "scheduler", "registered_transform_count"), + is(greaterThanOrEqualTo(0)) + ); + } + } + + private void createTransform(String indexName, String transformId) throws Exception { + createReviewsIndex(indexName, 100, NUM_USERS, TransformNodeStatsIT::getUserIdForRow, TransformNodeStatsIT::getDateStringForRow); + + var groups = Map.of( + "by-day", + createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null), + "by-user", + new TermsGroupSource("user_id", null, false), + "by-business", + new TermsGroupSource("business_id", null, false) + ); + + var aggs = AggregatorFactories.builder() + .addAggregator(AggregationBuilders.avg("review_score").field("stars")) + .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); + + var config = createTransformConfigBuilder(transformId, "reviews-by-user-business-day", QueryConfig.matchAll(), indexName) + .setPivotConfig(createPivotConfig(groups, aggs)) + .setSyncConfig(new TimeSyncConfig("timestamp", null)) + .build(); + + putTransform(transformId, Strings.toString(config), RequestOptions.DEFAULT); + startTransform(config.getId(), RequestOptions.DEFAULT); + + waitUntilCheckpoint(config.getId(), 1L); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index e788a85562e8e..ab4652c562e22 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; @@ -74,6 +75,7 @@ import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformNodeStatsAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import 
org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; import org.elasticsearch.xpack.transform.action.TransportPutTransformAction; @@ -93,6 +95,7 @@ import org.elasticsearch.xpack.transform.rest.action.RestCatTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestDeleteTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformAction; +import org.elasticsearch.xpack.transform.rest.action.RestGetTransformNodeStatsAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformStatsAction; import org.elasticsearch.xpack.transform.rest.action.RestPreviewTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestPutTransformAction; @@ -191,7 +194,8 @@ public List getRestHandlers( new RestCatTransformAction(), new RestUpgradeTransformsAction(), new RestResetTransformAction(), - new RestScheduleNowTransformAction() + new RestScheduleNowTransformAction(), + new RestGetTransformNodeStatsAction() ); } @@ -211,6 +215,7 @@ public List getRestHandlers( new ActionHandler<>(UpgradeTransformsAction.INSTANCE, TransportUpgradeTransformsAction.class), new ActionHandler<>(ResetTransformAction.INSTANCE, TransportResetTransformAction.class), new ActionHandler<>(ScheduleNowTransformAction.INSTANCE, TransportScheduleNowTransformAction.class), + new ActionHandler<>(GetTransformNodeStatsAction.INSTANCE, TransportGetTransformNodeStatsAction.class), // internal, no rest endpoint new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java new file mode 100644 index 0000000000000..bbe8f6ea05b4c --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformNodeStatsAction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsRequest; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodeStatsResponse; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsRequest; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsResponse; +import org.elasticsearch.xpack.transform.TransformServices; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.io.IOException; +import java.util.List; + +/** + * {@link TransportGetTransformNodeStatsAction} class fetches transform-related metrics from all the nodes and aggregates these metrics. + */ +public class TransportGetTransformNodeStatsAction extends TransportNodesAction< + NodesStatsRequest, + NodesStatsResponse, + NodeStatsRequest, + NodeStatsResponse> { + + private final TransportService transportService; + private final TransformScheduler scheduler; + + @Inject + public TransportGetTransformNodeStatsAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + TransformServices transformServices + ) { + super( + GetTransformNodeStatsAction.NAME, + clusterService, + transportService, + actionFilters, + NodeStatsRequest::new, + threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.transportService = transportService; + this.scheduler = transformServices.scheduler(); + } + + @Override + protected NodesStatsResponse newResponse(NodesStatsRequest request, List nodes, List failures) { + return new NodesStatsResponse(clusterService.getClusterName(), nodes, failures); + } + + @Override + protected NodeStatsRequest newNodeRequest(NodesStatsRequest request) { + return new NodeStatsRequest(); + } + + @Override + protected NodeStatsResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeStatsResponse(in); + } + + @Override + protected NodeStatsResponse nodeOperation(NodeStatsRequest request, Task task) { + var localNode = transportService.getLocalNode(); + var schedulerStats = scheduler.getStats(); + return new NodeStatsResponse(localNode, schedulerStats); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java new file mode 100644 index 0000000000000..30d3b6dbdcaae --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.rest.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformNodeStatsAction.NodesStatsRequest; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +@ServerlessScope(Scope.PUBLIC) +public class RestGetTransformNodeStatsAction extends BaseRestHandler { + + @Override + public List routes() { + return List.of(new Route(GET, TransformField.REST_BASE_PATH_TRANSFORMS + "_node_stats")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + NodesStatsRequest request = new NodesStatsRequest(); + return channel -> client.execute(GetTransformNodeStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "transform_get_transform_node_stats_action"; + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 201f20ac1eb09..712a95ded2076 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.utils.ExceptionsHelper; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; @@ -570,9 +571,7 @@ private void executeRetentionPolicy(ActionListener listener) { private void finalizeCheckpoint(ActionListener listener) { try { // reset the page size, so we do not memorize a low page size forever - if (function != null) { - context.setPageSize(function.getInitialPageSize()); - } + resetPageSize(); // reset the changed bucket to free memory if (changeCollector != null) { changeCollector.clear(); @@ -1234,12 +1233,17 @@ private RunState determineRunStateAtStart() { private void configurePageSize(Integer newPageSize) { initialConfiguredPageSize = newPageSize; + resetPageSize(); + } - // if the user explicitly set a page size, take it from the config, otherwise let the function decide + private void resetPageSize() { if (initialConfiguredPageSize != null && initialConfiguredPageSize > 0) { context.setPageSize(initialConfiguredPageSize); - } else { + } else if (function != null) { context.setPageSize(function.getInitialPageSize()); + } else { + // we should never be in a state where both initialConfiguredPageSize and function are null, but just in case... 
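The fallback on the next line completes a clear precedence order in `resetPageSize()`: an explicitly configured `max_page_search_size` wins, otherwise the function's initial page size applies, and only when both are absent does the hard default kick in. A standalone sketch of that precedence (names follow the diff; the value 500 is assumed here for `Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE`):

```java
// Simplified, standalone sketch of the precedence implemented by
// resetPageSize(); 500 is an assumed stand-in for the real constant.
final class PageSizeSketch {
    private static final int DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE = 500;

    static int resolvePageSize(Integer configured, Integer functionInitial) {
        if (configured != null && configured > 0) {
            return configured;            // explicit user setting always wins
        }
        if (functionInitial != null) {
            return functionInitial;       // otherwise let the function decide
        }
        return DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE; // defensive fallback
    }

    public static void main(String[] args) {
        System.out.println(resolvePageSize(20_000, 500)); // 20000
        System.out.println(resolvePageSize(null, 500));   // 500
        System.out.println(resolvePageSize(null, null));  // 500 (fallback)
    }
}
```

This mirrors the two indexer tests above: after a `CircuitBreakingException` halves the page size mid-run, finalizing the checkpoint resets it to the configured value when one is set, and to the default otherwise.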
+ context.setPageSize(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java index e11da6af1c285..cd3630a095ed1 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueue.java @@ -108,6 +108,15 @@ public synchronized TransformScheduledTask remove(String transformId) { return task; } + /** + * Returns the current queue size. + * + * @return the current queue size + */ + public synchronized int size() { + return tasks.size(); + } + // Visible for testing /** * @return the set of all the transform ids diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java index a02f2aac956e2..9c7afa38a5c59 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduler.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.TransformSchedulerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import org.elasticsearch.xpack.transform.Transform; @@ -21,6 +22,7 @@ import java.time.Instant; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -270,6 +272,23 @@ public void deregisterTransform(String transformId) { scheduledTasks.remove(transformId); } + public TransformSchedulerStats getStats() { + return new TransformSchedulerStats( + scheduledTasks.size(), + Optional.ofNullable(scheduledTasks.first()).map(TransformScheduledTask::getTransformId).orElse(null) + ); + } + + // Visible for testing + /** + * Returns the number of transforms currently in the queue. 
+ * + * @return number of transforms currently in the queue + */ + int getRegisteredTransformCount() { + return scheduledTasks.size(); + } + // Visible for testing /** * @return queue current contents diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index abad10b148f21..1c268174f5be5 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -35,6 +37,7 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfigTests; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -43,6 +46,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; @@ -59,7 +63,9 @@ import java.time.Clock; import java.util.Collections; import java.util.Map; +import java.util.concurrent.BlockingDeque; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -107,9 +113,13 @@ class MockedTransformIndexer extends TransformIndexer { private CountDownLatch searchLatch; private CountDownLatch doProcessLatch; private CountDownLatch doSaveStateLatch; + private CountDownLatch afterFinishOrFailureLatch; private AtomicBoolean saveStateInProgress = new AtomicBoolean(false); + private BlockingDeque searchExceptions = new LinkedBlockingDeque<>(); + private BlockingDeque runBeforeOnFinish = new LinkedBlockingDeque<>(); + // how many loops to execute until reporting done private int numberOfLoops; @@ -211,7 +221,11 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener throw new IllegalStateException(e); } } - threadPool.generic().execute(() -> nextPhase.onResponse(ONE_HIT_SEARCH_RESPONSE)); + if (searchExceptions.isEmpty() == false) { + nextPhase.onFailure(searchExceptions.poll()); + } else { + threadPool.generic().execute(() -> nextPhase.onResponse(ONE_HIT_SEARCH_RESPONSE)); + } } @Override @@ -261,6 +275,22 @@ void 
doMaybeCreateDestIndex(Map deducedDestIndexMappings, Action listener.onResponse(null); } + @Override + protected void onFinish(ActionListener listener) { + while (runBeforeOnFinish.isEmpty() == false) { + runBeforeOnFinish.poll().run(); + } + super.onFinish(listener); + } + + @Override + protected void afterFinishOrFailure() { + super.afterFinishOrFailure(); + if (afterFinishOrFailureLatch != null) { + afterFinishOrFailureLatch.countDown(); + } + } + public boolean waitingForNextSearch() { return super.getScheduledNextSearch() != null; } @@ -278,6 +308,14 @@ void persistState(TransformState state, ActionListener listener) { void validate(ActionListener listener) { listener.onResponse(null); } + + public void addAfterFinishOrFailureLatch() { + afterFinishOrFailureLatch = new CountDownLatch(1); + } + + public void waitForAfterFinishOrFailureLatch(long timeout, TimeUnit unit) throws InterruptedException { + assertTrue(afterFinishOrFailureLatch.await(timeout, unit)); + } } @Before @@ -439,6 +477,135 @@ public void testInterActionWhileIndexerShutsdown() throws Exception { assertBusy(() -> assertEquals(IndexerState.STOPPED, indexer.getState()), 5, TimeUnit.SECONDS); } + public void testMaxPageSearchSizeIsResetToDefaultValue() throws Exception { + TransformConfig config = new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)), + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + null, + null, + null, + null, + null + ); + AtomicReference state = new AtomicReference<>(IndexerState.STARTED); + + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); + final MockedTransformIndexer indexer = createMockIndexer( + 1, + config, + state, + null, + threadPool, + auditor, + new TransformIndexerStats(), + context + ); + + // add latches + CountDownLatch searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + + indexer.start(); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertEquals(indexer.getState(), IndexerState.INDEXING); + + // set circuit breaker to 50% + indexer.searchExceptions.offer(new CircuitBreakingException("hello", 2, 1, CircuitBreaker.Durability.TRANSIENT)); + indexer.runBeforeOnFinish.offer(() -> { + assertEquals(Math.round(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE / 2.0), context.getPageSize()); + }); + assertFalse(indexer.runBeforeOnFinish.isEmpty()); + + // run and wait + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // rerun, don't throw an exception this time + searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + assertBusy(() -> assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()))); + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // verify that we checked the pageSize decreased + assertTrue(indexer.runBeforeOnFinish.isEmpty()); + // verify that the pageSize reset + assertEquals(Transform.DEFAULT_INITIAL_MAX_PAGE_SEARCH_SIZE.intValue(), context.getPageSize()); + } + + public void testMaxPageSearchSizeIsResetToConfiguredValue() throws Exception { + TransformConfig config = new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + new TimeSyncConfig("timestamp", 
TimeValue.timeValueSeconds(1)), + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + null, + null, + null, + null, + null + ); + AtomicReference state = new AtomicReference<>(IndexerState.STARTED); + + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); + final MockedTransformIndexer indexer = createMockIndexer( + 1, + config, + state, + null, + threadPool, + auditor, + new TransformIndexerStats(), + context + ); + + // add latches + CountDownLatch searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + + indexer.start(); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertEquals(indexer.getState(), IndexerState.INDEXING); + + var configuredMaxPageSearchSize = 20_000; + indexer.applyNewSettings( + new SettingsConfig.Builder(SettingsConfig.EMPTY).setMaxPageSearchSize(configuredMaxPageSearchSize).build() + ); + + // set circuit breaker to 50% + indexer.searchExceptions.offer(new CircuitBreakingException("hello", 2, 1, CircuitBreaker.Durability.TRANSIENT)); + indexer.runBeforeOnFinish.offer(() -> { assertEquals(Math.round(configuredMaxPageSearchSize / 2.0), context.getPageSize()); }); + assertFalse(indexer.runBeforeOnFinish.isEmpty()); + + // run and wait + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // rerun, don't throw an exception this time + searchLatch = indexer.createAwaitForSearchLatch(1); + indexer.addAfterFinishOrFailureLatch(); + assertBusy(() -> assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()))); + searchLatch.countDown(); + indexer.waitForAfterFinishOrFailureLatch(5, TimeUnit.SECONDS); + + // verify that we checked the pageSize decreased + assertTrue(indexer.runBeforeOnFinish.isEmpty()); + // verify that the pageSize reset + assertEquals(configuredMaxPageSearchSize, context.getPageSize()); + } + private MockedTransformIndexer createMockIndexer( int numberOfLoops, TransformConfig config, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java index 5030d42f9c17c..6c032e752613b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java @@ -54,6 +54,7 @@ public void testEmptyQueue() { public void testNonEmptyQueue() { queue.add(createTask("task-1", 5)); assertThat(queue.first(), is(notNullValue())); + assertThat(queue.size(), is(equalTo(1))); } public void testAddAndRemove() { @@ -63,6 +64,7 @@ public void testAddAndRemove() { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-2", 1)))); + assertThat(queue.size(), is(equalTo(3))); queue.remove("task-1"); queue.remove("task-2"); @@ -86,6 +88,7 @@ public void testConcurrentAddAndRemove() throws Exception { } assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), hasSize(100)); + assertThat(queue.size(), is(equalTo(100))); { Set removedTaskIds = new HashSet<>(); @@ -107,11 +110,13 
@@ public void testConcurrentAddAndRemove() throws Exception { public void testAddNoOp() { queue.add(createTask("task-1", 5)); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); // Try adding a task with a duplicate key queue.add(createTask("task-1", 6)); // Verify that the add operation had no effect assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testRemoveNoOp() { @@ -121,6 +126,7 @@ public void testRemoveNoOp() { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1")); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testUpdateNoOp() { @@ -130,6 +136,7 @@ public void testUpdateNoOp() { assertThat(queue.first(), is(notNullValue())); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1")); assertThat(queue.first(), is(equalTo(createTask("task-1", 5)))); + assertThat(queue.size(), is(equalTo(1))); } public void testUpdateModifiesId() { @@ -154,6 +161,7 @@ public void testRemoveAll() { containsInAnyOrder("task-1", "task-2", "task-3", "task-4", "task-5", "task-6", "task-7", "task-8", "task-9") ); assertThat(queue.first(), is(equalTo(createTask("task-7", 0)))); + assertThat(queue.size(), is(equalTo(9))); List tasksByPriority = new ArrayList<>(); while (queue.first() != null) { @@ -184,15 +192,18 @@ public void testUpdatePriority() { queue.add(createTask("task-3", 9)); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-2", 1)))); + assertThat(queue.size(), is(equalTo(3))); queue.update("task-3", task -> createTask(task.getTransformId(), -999)); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2", "task-3")); assertThat(queue.first(), is(equalTo(createTask("task-3", -999)))); + assertThat(queue.size(), is(equalTo(3))); queue.update("task-1", task -> createTask(task.getTransformId(), 0)); queue.remove("task-3"); assertThat(queue.getTransformIds(), containsInAnyOrder("task-1", "task-2")); assertThat(queue.first(), is(equalTo(createTask("task-1", 0)))); + assertThat(queue.size(), is(equalTo(2))); } private static TransformScheduledTask createTask(String transformId, long nextScheduledTimeMillis) { @@ -213,5 +224,6 @@ private static void failUnexpectedCall(Event event) { private void assertThatQueueIsEmpty() { assertThat(queue.first(), is(nullValue())); assertThat(queue.getTransformIds(), is(empty())); + assertThat(queue.size(), is(equalTo(0))); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java index 8d3220a5b4de3..06fdfd7b538b1 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java @@ -80,7 +80,9 @@ private void testScheduling(int frequencySeconds, int minFreqencySeconds) { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, minFrequency); + assertThat(transformScheduler.getRegisteredTransformCount(), 
is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, fiveSeconds, 0L, 0, 5000, listener)) @@ -125,6 +127,7 @@ private void testScheduling(int frequencySeconds, int minFreqencySeconds) { assertThat(events.get(2), is(equalTo(new TransformScheduler.Event(transformId, 10005, 10010)))); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -139,7 +142,9 @@ public void testSchedulingWithFailures() { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 60 * 60 * 1000, listener)) @@ -177,6 +182,7 @@ public void testSchedulingWithFailures() { ); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -191,7 +197,9 @@ public void testScheduleNow() { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(1))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 60 * 60 * 1000, listener)) @@ -226,6 +234,7 @@ public void testScheduleNow() { assertThat(events.get(2), is(equalTo(new TransformScheduler.Event(transformId, 31 * 60 * 1000, 31 * 60 * 1000)))); transformScheduler.deregisterTransform(transformId); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); assertThat(transformScheduler.getTransformScheduledTasks(), is(empty())); transformScheduler.stop(); @@ -402,9 +411,11 @@ public void testRegisterMultipleTransforms() { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(3))); assertThat( transformScheduler.getTransformScheduledTasks(), contains( @@ -432,9 +443,11 @@ public void testMultipleTransformsEligibleForProcessingAtOnce() { TransformScheduler.Listener listener = events::add; TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, 
TimeValue.ZERO); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(0))); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); + assertThat(transformScheduler.getRegisteredTransformCount(), is(equalTo(3))); assertThat( transformScheduler.getTransformScheduledTasks(), contains(
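The remaining scheduler tests continue this pattern: every `registerTransform` call is matched by a count check, and `deregisterTransform` drops the count back down, so `registered_transform_count` stays in lockstep with the queue contents. A minimal stand-in sketch of that invariant (`SchedulerCountSketch` is hypothetical; the real `TransformScheduler` delegates to the synchronized `TransformScheduledTaskQueue.size()` added in this diff):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical stand-in for the scheduler's task queue: register/deregister
// keep the reported count in lockstep with the queue contents.
final class SchedulerCountSketch {
    private final ConcurrentMap<String, Runnable> tasks = new ConcurrentHashMap<>();

    void registerTransform(String transformId, Runnable listener) {
        // duplicate ids are a no-op, matching the queue's add() behaviour
        tasks.putIfAbsent(transformId, listener);
    }

    void deregisterTransform(String transformId) {
        tasks.remove(transformId);
    }

    int getRegisteredTransformCount() {
        return tasks.size();
    }

    public static void main(String[] args) {
        SchedulerCountSketch scheduler = new SchedulerCountSketch();
        System.out.println(scheduler.getRegisteredTransformCount()); // 0
        scheduler.registerTransform("transform-1", () -> {});
        scheduler.registerTransform("transform-1", () -> {});       // no-op duplicate
        System.out.println(scheduler.getRegisteredTransformCount()); // 1
        scheduler.deregisterTransform("transform-1");
        System.out.println(scheduler.getRegisteredTransformCount()); // 0
    }
}
```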