diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 6c8b8edfcbac..4bc72aec2097 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 69d11ef1dabb..3d6095d0b9e6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 5514fc376a28..f92881da7fea 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: + - "8.15.6" - "8.16.2" - "8.17.0" - "8.18.0" diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy index 6d080e1c8076..bb100b6b2388 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy @@ -9,9 +9,10 @@ package 
org.elasticsearch.gradle.internal +import spock.lang.Unroll + import org.elasticsearch.gradle.fixtures.AbstractGitAwareGradleFuncTest import org.gradle.testkit.runner.TaskOutcome -import spock.lang.Unroll class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleFuncTest { @@ -23,8 +24,10 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF apply plugin: 'elasticsearch.internal-distribution-bwc-setup' """ execute("git branch origin/8.x", file("cloned")) + execute("git branch origin/8.3", file("cloned")) + execute("git branch origin/8.2", file("cloned")) + execute("git branch origin/8.1", file("cloned")) execute("git branch origin/7.16", file("cloned")) - execute("git branch origin/7.15", file("cloned")) } def "builds distribution from branches via archives extractedAssemble"() { @@ -48,10 +51,11 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:darwin-tar:${expectedAssembleTaskName}") where: - bwcDistVersion | bwcProject | expectedAssembleTaskName - "8.0.0" | "minor" | "extractedAssemble" - "7.16.0" | "staged" | "extractedAssemble" - "7.15.2" | "bugfix" | "extractedAssemble" + bwcDistVersion | bwcProject | expectedAssembleTaskName + "8.4.0" | "minor" | "extractedAssemble" + "8.3.0" | "staged" | "extractedAssemble" + "8.2.1" | "bugfix" | "extractedAssemble" + "8.1.3" | "bugfix2" | "extractedAssemble" } @Unroll @@ -70,8 +74,8 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF where: bwcDistVersion | platform - "8.0.0" | "darwin" - "8.0.0" | "linux" + "8.4.0" | "darwin" + "8.4.0" | "linux" } def "bwc expanded distribution folder can be resolved as bwc project artifact"() { @@ -107,11 +111,11 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF result.task(":resolveExpandedDistribution").outcome == TaskOutcome.SUCCESS 
result.task(":distribution:bwc:minor:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS and: "assemble task triggered" - result.output.contains("[8.0.0] > Task :distribution:archives:darwin-tar:extractedAssemble") + result.output.contains("[8.4.0] > Task :distribution:archives:darwin-tar:extractedAssemble") result.output.contains("expandedRootPath /distribution/bwc/minor/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install") result.output.contains("nested folder /distribution/bwc/minor/build/bwc/checkout-8.x/" + - "distribution/archives/darwin-tar/build/install/elasticsearch-8.0.0-SNAPSHOT") + "distribution/archives/darwin-tar/build/install/elasticsearch-8.4.0-SNAPSHOT") } } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy index eb6185e5aed5..fc5d432a9ef9 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy @@ -57,7 +57,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest elasticsearch_distributions { test_distro { - version = "8.0.0" + version = "8.4.0" type = "archive" platform = "linux" architecture = Architecture.current(); @@ -87,7 +87,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest elasticsearch_distributions { test_distro { - version = "8.0.0" + version = "8.4.0" type = "archive" platform = "linux" architecture = Architecture.current(); diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy 
b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy index e3efe3d7ffbf..15b057a05e03 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy @@ -40,7 +40,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -61,11 +61,11 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:staged"() { + def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:minor"() { given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -98,8 +98,8 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/staged/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/minor/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: def result = 
gradleRunner("yamlRestCompatTest").build() @@ -145,7 +145,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() withVersionCatalogue() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -186,7 +186,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:staged") << """ + subProject(":distribution:bwc:minor") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -230,7 +230,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe setupRestResources([], []) - file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + file("distribution/bwc/minor/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix2/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/bwc/maintenance/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle index 
8c321294b585..e931537fcd6e 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/settings.gradle @@ -10,9 +10,11 @@ rootProject.name = "root" include ":distribution:bwc:bugfix" +include ":distribution:bwc:bugfix2" include ":distribution:bwc:minor" include ":distribution:bwc:major" include ":distribution:bwc:staged" +include ":distribution:bwc:maintenance" include ":distribution:archives:darwin-tar" include ":distribution:archives:oss-darwin-tar" include ":distribution:archives:darwin-aarch64-tar" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 93c2623a23d3..37b28389ad97 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -21,14 +21,15 @@ import java.util.Optional; import java.util.Set; import java.util.TreeMap; -import java.util.TreeSet; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static java.util.Collections.reverseOrder; import static java.util.Collections.unmodifiableList; +import static java.util.Comparator.comparing; /** * A container for elasticsearch supported version information used in BWC testing. 
@@ -73,11 +74,11 @@ public class BwcVersions implements Serializable { private final transient List versions; private final Map unreleased; - public BwcVersions(List versionLines) { - this(versionLines, Version.fromString(VersionProperties.getElasticsearch())); + public BwcVersions(List versionLines, List developmentBranches) { + this(versionLines, Version.fromString(VersionProperties.getElasticsearch()), developmentBranches); } - public BwcVersions(Version currentVersionProperty, List allVersions) { + public BwcVersions(Version currentVersionProperty, List allVersions, List developmentBranches) { if (allVersions.isEmpty()) { throw new IllegalArgumentException("Could not parse any versions"); } @@ -86,12 +87,12 @@ public BwcVersions(Version currentVersionProperty, List allVersions) { this.currentVersion = allVersions.get(allVersions.size() - 1); assertCurrentVersionMatchesParsed(currentVersionProperty); - this.unreleased = computeUnreleased(); + this.unreleased = computeUnreleased(developmentBranches); } // Visible for testing - BwcVersions(List versionLines, Version currentVersionProperty) { - this(currentVersionProperty, parseVersionLines(versionLines)); + BwcVersions(List versionLines, Version currentVersionProperty, List developmentBranches) { + this(currentVersionProperty, parseVersionLines(versionLines), developmentBranches); } private static List parseVersionLines(List versionLines) { @@ -126,58 +127,77 @@ public void forPreviousUnreleased(Consumer consumer) { getUnreleased().stream().filter(version -> version.equals(currentVersion) == false).map(unreleased::get).forEach(consumer); } - private String getBranchFor(Version version) { - if (version.equals(currentVersion)) { - // Just assume the current branch is 'main'. It's actually not important, we never check out the current branch. 
- return "main"; - } else { + private String getBranchFor(Version version, List developmentBranches) { + // If the current version matches a specific feature freeze branch, use that + if (developmentBranches.contains(version.getMajor() + "." + version.getMinor())) { return version.getMajor() + "." + version.getMinor(); + } else if (developmentBranches.contains(version.getMajor() + ".x")) { // Otherwise if an n.x branch exists and we are that major + return version.getMajor() + ".x"; + } else { // otherwise we're the main branch + return "main"; } } - private Map computeUnreleased() { - Set unreleased = new TreeSet<>(); - // The current version is being worked, is always unreleased - unreleased.add(currentVersion); - // Recurse for all unreleased versions starting from the current version - addUnreleased(unreleased, currentVersion, 0); + private Map computeUnreleased(List developmentBranches) { + Map result = new TreeMap<>(); - // Grab the latest version from the previous major if necessary as well, this is going to be a maintenance release - Version maintenance = versions.stream() - .filter(v -> v.getMajor() == currentVersion.getMajor() - 1) - .max(Comparator.naturalOrder()) - .orElseThrow(); - // This is considered the maintenance release only if we haven't yet encountered it - boolean hasMaintenanceRelease = unreleased.add(maintenance); + // The current version is always in development + String currentBranch = getBranchFor(currentVersion, developmentBranches); + result.put(currentVersion, new UnreleasedVersionInfo(currentVersion, currentBranch, ":distribution")); + + // Check for an n.x branch as well + if (currentBranch.equals("main") && developmentBranches.stream().anyMatch(s -> s.endsWith(".x"))) { + // This should correspond to the latest new minor + Version version = versions.stream() + .sorted(Comparator.reverseOrder()) + .filter(v -> v.getMajor() == (currentVersion.getMajor() - 1) && v.getRevision() == 0) + .findFirst() + .orElseThrow(() -> new 
IllegalStateException("Unable to determine development version for branch")); + String branch = getBranchFor(version, developmentBranches); + assert branch.equals(currentVersion.getMajor() - 1 + ".x") : "Expected branch does not match development branch"; + + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:minor")); + } - List unreleasedList = unreleased.stream().sorted(Comparator.reverseOrder()).toList(); - Map result = new TreeMap<>(); - boolean newMinor = false; - for (int i = 0; i < unreleasedList.size(); i++) { - Version esVersion = unreleasedList.get(i); - // This is either a new minor or staged release - if (currentVersion.equals(esVersion)) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution")); - } else if (esVersion.getRevision() == 0) { - // If there are two upcoming unreleased minors then this one is the new minor - if (newMinor == false && unreleasedList.get(i + 1).getRevision() == 0) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); - newMinor = true; - } else if (newMinor == false - && unreleasedList.stream().filter(v -> v.getMajor() == esVersion.getMajor() && v.getRevision() == 0).count() == 1) { - // This is the only unreleased new minor which means we've not yet staged it for release - result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); - newMinor = true; - } else { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:staged")); - } - } else { - // If this is the oldest unreleased version and we have a maintenance release - if (i == unreleasedList.size() - 1 && hasMaintenanceRelease) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:maintenance")); - } else { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, 
getBranchFor(esVersion), ":distribution:bwc:bugfix")); - } + // Now handle all the feature freeze branches + List featureFreezeBranches = developmentBranches.stream() + .filter(b -> Pattern.matches("[0-9]+\\.[0-9]+", b)) + .sorted(reverseOrder(comparing(s -> Version.fromString(s, Version.Mode.RELAXED)))) + .toList(); + + boolean existingBugfix = false; + for (int i = 0; i < featureFreezeBranches.size(); i++) { + String branch = featureFreezeBranches.get(i); + Version version = versions.stream() + .sorted(Comparator.reverseOrder()) + .filter(v -> v.toString().startsWith(branch + ".")) + .findFirst() + .orElse(null); + + // If we don't know about this version we can ignore it + if (version == null) { + continue; + } + + // If this is the current version we can ignore as we've already handled it + if (version.equals(currentVersion)) { + continue; + } + + // We only maintain compatibility back one major so ignore anything older + if (currentVersion.getMajor() - version.getMajor() > 1) { + continue; + } + + // This is the maintenance version + if (i == featureFreezeBranches.size() - 1) { + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:maintenance")); + } else if (version.getRevision() == 0) { // This is the next staged minor + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:staged")); + } else { // This is a bugfix + String project = existingBugfix ?
"bugfix2" : "bugfix"; + result.put(version, new UnreleasedVersionInfo(version, branch, ":distribution:bwc:" + project)); + existingBugfix = true; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 0535026b2594..27d2a66feb20 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -8,6 +8,9 @@ */ package org.elasticsearch.gradle.internal.info; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.apache.commons.io.IOUtils; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.BwcVersions; @@ -44,11 +47,13 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.file.Files; import java.time.ZoneOffset; import java.time.ZonedDateTime; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Random; @@ -68,6 +73,7 @@ public class GlobalBuildInfoPlugin implements Plugin { private final JavaInstallationRegistry javaInstallationRegistry; private final JvmMetadataDetector metadataDetector; private final ProviderFactory providers; + private final ObjectMapper objectMapper; private JavaToolchainService toolChainService; private Project project; @@ -82,7 +88,7 @@ public GlobalBuildInfoPlugin( this.javaInstallationRegistry = javaInstallationRegistry; this.metadataDetector = new ErrorTraceMetadataDetector(metadataDetector); this.providers = providers; - + this.objectMapper = new ObjectMapper(); } @Override @@ -190,12 +196,27 @@ private BwcVersions resolveBwcVersions() 
{ ); try (var is = new FileInputStream(versionsFilePath)) { List versionLines = IOUtils.readLines(is, "UTF-8"); - return new BwcVersions(versionLines); + return new BwcVersions(versionLines, getDevelopmentBranches()); } catch (IOException e) { throw new IllegalStateException("Unable to resolve to resolve bwc versions from versionsFile.", e); } } + private List getDevelopmentBranches() { + List branches = new ArrayList<>(); + File branchesFile = new File(Util.locateElasticsearchWorkspace(project.getGradle()), "branches.json"); + try (InputStream is = new FileInputStream(branchesFile)) { + JsonNode json = objectMapper.readTree(is); + for (JsonNode node : json.get("branches")) { + branches.add(node.get("branch").asText()); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + return branches; + } + private void logGlobalBuildInfo(BuildParameterExtension buildParams) { final String osName = System.getProperty("os.name"); final String osVersion = System.getProperty("os.version"); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index 9c7d20d84a67..4d033564a42b 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -17,8 +17,9 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo class BwcVersionsSpec extends Specification { List versionLines = [] - def "current version is next minor with next major and last minor both staged"() { + def "current version is next major"() { given: + addVersion('7.17.10', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -29,25 +30,25 @@ class BwcVersionsSpec extends Specification { addVersion('8.16.1', '9.10.0') addVersion('8.17.0', 
'9.10.0') addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } - def "current is next minor with upcoming minor staged"() { + def "current version is next major with staged minor"() { given: + addVersion('7.17.10', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -57,53 +58,106 @@ class BwcVersionsSpec extends Specification { addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') addVersion('9.0.0', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def bwc = new 
BwcVersions(versionLines, v('9.0.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0')] + } + + def "current version is first new minor in major series"() { + given: + addVersion('7.17.10', '8.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), 
v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.1.0')] } - def "current version is staged major"() { + def "current version is new minor with single bugfix"() { given: - addVersion('8.14.0', '9.9.0') - addVersion('8.14.1', '9.9.0') - addVersion('8.14.2', '9.9.0') - addVersion('8.15.0', '9.9.0') - addVersion('8.15.1', '9.9.0') - addVersion('8.15.2', '9.9.0') + addVersion('7.17.10', '8.9.0') addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0'), ['main', '9.0', '8.18']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } - def "current version is major with unreleased next minor"() { + def 
"current version is new minor with single bugfix and staged minor"() { given: + addVersion('7.17.10', '8.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.18.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.0.0') + addVersion('9.2.0', '10.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('9.2.0'), ['main', '9.1', '9.0', '8.18']) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.18', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), '9.1', ':distribution:bwc:staged'), + (v('9.2.0')): new UnreleasedVersionInfo(v('9.2.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0'), v('9.2.0')] + bwc.indexCompatible == [v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0'), v('9.0.0'), v('9.0.1'), v('9.1.0'), v('9.2.0')] + } + + def "current version is next minor"() { + given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -113,24 +167,29 @@ class BwcVersionsSpec extends Specification { addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('8.17.1', '9.10.0') + addVersion('8.18.0', '9.10.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', 
':distribution:bwc:bugfix'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix2'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('8.18.0')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('8.18.0')] } - def "current version is major with staged next minor"() { + def "current version is new minor with staged minor"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -138,26 +197,31 @@ class BwcVersionsSpec extends Specification { addVersion('8.15.1', '9.9.0') addVersion('8.15.2', '9.9.0') addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') addVersion('8.17.0', '9.10.0') - addVersion('9.0.0', '10.0.0') + addVersion('8.18.0', '9.10.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.0')) + def bwc = new BwcVersions(versionLines, v('8.18.0'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, 
bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), - (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), - (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), - (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix2'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('8.18.0')): new UnreleasedVersionInfo(v('8.18.0'), '8.x', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.18.0')] } - def "current version is next bugfix"() { + def "current version is first bugfix"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') @@ -166,52 +230,44 @@ class BwcVersionsSpec extends Specification { addVersion('8.15.2', '9.9.0') addVersion('8.16.0', '9.10.0') addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', 
'10.0.0') when: - def bwc = new BwcVersions(versionLines, v('9.0.1')) + def bwc = new BwcVersions(versionLines, v('8.16.1'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1')] } - def "current version is next minor with no staged releases"() { + def "current version is second bugfix"() { given: + addVersion('7.16.3', '8.9.0') + addVersion('7.17.0', '8.9.0') + addVersion('7.17.1', '8.9.0') addVersion('8.14.0', '9.9.0') addVersion('8.14.1', '9.9.0') addVersion('8.14.2', '9.9.0') addVersion('8.15.0', '9.9.0') addVersion('8.15.1', '9.9.0') addVersion('8.15.2', '9.9.0') - addVersion('8.16.0', '9.10.0') - addVersion('8.16.1', '9.10.0') - addVersion('8.17.0', '9.10.0') - addVersion('8.17.1', '9.10.0') - addVersion('9.0.0', '10.0.0') - addVersion('9.0.1', '10.0.0') - addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('9.1.0')) + def 
bwc = new BwcVersions(versionLines, v('8.15.2'), ['main', '8.x', '8.17', '8.16', '8.15', '7.17']) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), - (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), - (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') + (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution'), ] - bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] - bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] + bwc.indexCompatible == [v('7.16.3'), v('7.17.0'), v('7.17.1'), v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2')] } private void addVersion(String elasticsearch, String lucene) { diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java index 639dec280ae9..7512fa20814c 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java @@ -16,6 +16,7 @@ import java.io.File; import java.util.Arrays; +import java.util.List; public class AbstractDistributionDownloadPluginTests { protected static Project rootProject; @@ -28,22 +29,27 @@ public class 
AbstractDistributionDownloadPluginTests { protected static final Version BWC_STAGED_VERSION = Version.fromString("1.0.0"); protected static final Version BWC_BUGFIX_VERSION = Version.fromString("1.0.1"); protected static final Version BWC_MAINTENANCE_VERSION = Version.fromString("0.90.1"); + protected static final List DEVELOPMENT_BRANCHES = Arrays.asList("main", "1.1", "1.0", "0.90"); protected static final BwcVersions BWC_MINOR = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_STAGED = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_BUGFIX = new BwcVersions( BWC_MAJOR_VERSION, - Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION) + Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static final BwcVersions BWC_MAINTENANCE = new BwcVersions( BWC_MINOR_VERSION, - Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_BUGFIX_VERSION, BWC_MINOR_VERSION) + Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_BUGFIX_VERSION, BWC_MINOR_VERSION), + DEVELOPMENT_BRANCHES ); protected static String projectName(String base, boolean bundledJdk) { diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index f3f8e4703eba..07214b5fbf84 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ 
b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -156,12 +156,12 @@ abstract class AbstractGradleFuncTest extends Specification { File internalBuild( List extraPlugins = [], - String bugfix = "7.15.2", - String bugfixLucene = "8.9.0", - String staged = "7.16.0", - String stagedLucene = "8.10.0", - String minor = "8.0.0", - String minorLucene = "9.0.0" + String maintenance = "7.16.10", + String bugfix2 = "8.1.3", + String bugfix = "8.2.1", + String staged = "8.3.0", + String minor = "8.4.0", + String current = "9.0.0" ) { buildFile << """plugins { id 'elasticsearch.global-build-info' @@ -172,15 +172,17 @@ abstract class AbstractGradleFuncTest extends Specification { import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.Version - Version currentVersion = Version.fromString("8.1.0") + Version currentVersion = Version.fromString("${current}") def versionList = [ + Version.fromString("$maintenance"), + Version.fromString("$bugfix2"), Version.fromString("$bugfix"), Version.fromString("$staged"), Version.fromString("$minor"), currentVersion ] - BwcVersions versions = new BwcVersions(currentVersion, versionList) + BwcVersions versions = new BwcVersions(currentVersion, versionList, ['main', '8.x', '8.3', '8.2', '8.1', '7.16']) buildParams.getBwcVersionsProperty().set(versions) """ } diff --git a/distribution/bwc/bugfix2/build.gradle b/distribution/bwc/bugfix2/build.gradle new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 57750f2162a7..dc2bcd96b8d9 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -25,8 +25,8 @@ dependencies { implementation project(":libs:plugin-api") implementation project(":libs:plugin-scanner") // TODO: asm is picked up from the plugin scanner, we should consolidate so it is not defined twice - 
implementation 'org.ow2.asm:asm:9.7' - implementation 'org.ow2.asm:asm-tree:9.7' + implementation 'org.ow2.asm:asm:9.7.1' + implementation 'org.ow2.asm:asm-tree:9.7.1' api "org.bouncycastle:bcpg-fips:1.0.7.1" api "org.bouncycastle:bc-fips:1.0.2.5" diff --git a/docs/changelog/114445.yaml b/docs/changelog/114445.yaml new file mode 100644 index 000000000000..afbc080d1e0b --- /dev/null +++ b/docs/changelog/114445.yaml @@ -0,0 +1,6 @@ +pr: 114445 +summary: Wrap jackson exception on malformed json string +area: Infra/Core +type: bug +issues: + - 114142 diff --git a/docs/changelog/117359.yaml b/docs/changelog/117359.yaml new file mode 100644 index 000000000000..87d2d828ace5 --- /dev/null +++ b/docs/changelog/117359.yaml @@ -0,0 +1,5 @@ +pr: 117359 +summary: Term query for ES|QL +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/117963.yaml b/docs/changelog/117963.yaml new file mode 100644 index 000000000000..4a50dc175786 --- /dev/null +++ b/docs/changelog/117963.yaml @@ -0,0 +1,5 @@ +pr: 117963 +summary: '`SearchServiceTests.testParseSourceValidation` failure' +area: Search +type: bug +issues: [] diff --git a/docs/changelog/118094.yaml b/docs/changelog/118094.yaml new file mode 100644 index 000000000000..a8866543fa7d --- /dev/null +++ b/docs/changelog/118094.yaml @@ -0,0 +1,5 @@ +pr: 118094 +summary: Update ASM 9.7 -> 9.7.1 to support JDK 24 +area: Infra/Core +type: upgrade +issues: [] diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 793d38e3d73b..11a2c860eb32 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -386,6 +386,9 @@ The tasks infrastructure is used to track currently executing operations in the Each individual task is local to a node, but can be related to other tasks, on the same node or other nodes, via a parent-child relationship. 
+> [!NOTE] +> The Task management API is experimental/beta, its status and outstanding issues can be tracked [here](https://github.com/elastic/elasticsearch/issues/51628). + ### Task tracking and registration Tasks are tracked in-memory on each node in the node's [TaskManager], new tasks are registered via one of the [TaskManager#register] methods. diff --git a/docs/reference/esql/functions/description/term.asciidoc b/docs/reference/esql/functions/description/term.asciidoc new file mode 100644 index 000000000000..c43aeb25a0ef --- /dev/null +++ b/docs/reference/esql/functions/description/term.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Performs a Term query on the specified field. Returns true if the provided term matches the row. diff --git a/docs/reference/esql/functions/examples/term.asciidoc b/docs/reference/esql/functions/examples/term.asciidoc new file mode 100644 index 000000000000..b9d57f366294 --- /dev/null +++ b/docs/reference/esql/functions/examples/term.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/term-function.csv-spec[tag=term-with-field] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/term-function.csv-spec[tag=term-with-field-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/term.json b/docs/reference/esql/functions/kibana/definition/term.json new file mode 100644 index 000000000000..d8bb61fd596a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/term.json @@ -0,0 +1,85 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "term", + "description" : "Performs a Term query on the specified field. Returns true if the provided term matches the row.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Term you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Term you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Term you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Term you wish to find in the provided field." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "from books \n| where term(author, \"gabriel\") \n| keep book_no, title\n| limit 3;" + ], + "preview" : true, + "snapshot_only" : true +} diff --git a/docs/reference/esql/functions/kibana/docs/term.md b/docs/reference/esql/functions/kibana/docs/term.md new file mode 100644 index 000000000000..83e61a949208 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/term.md @@ -0,0 +1,13 @@ + + +### TERM +Performs a Term query on the specified field. Returns true if the provided term matches the row. + +``` +from books +| where term(author, "gabriel") +| keep book_no, title +| limit 3; +``` diff --git a/docs/reference/esql/functions/layout/term.asciidoc b/docs/reference/esql/functions/layout/term.asciidoc new file mode 100644 index 000000000000..1fe94491bed0 --- /dev/null +++ b/docs/reference/esql/functions/layout/term.asciidoc @@ -0,0 +1,17 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-term]] +=== `TERM` + +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +*Syntax* + +[.text-center] +image::esql/functions/signature/term.svg[Embedded,opts=inline] + +include::../parameters/term.asciidoc[] +include::../description/term.asciidoc[] +include::../types/term.asciidoc[] +include::../examples/term.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/term.asciidoc b/docs/reference/esql/functions/parameters/term.asciidoc new file mode 100644 index 000000000000..edba8625d04c --- /dev/null +++ b/docs/reference/esql/functions/parameters/term.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: +Field that the query will target. + +`query`:: +Term you wish to find in the provided field. diff --git a/docs/reference/esql/functions/signature/term.svg b/docs/reference/esql/functions/signature/term.svg new file mode 100644 index 000000000000..955dd7fa215a --- /dev/null +++ b/docs/reference/esql/functions/signature/term.svg @@ -0,0 +1 @@ +TERM(field,query) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/term.asciidoc b/docs/reference/esql/functions/types/term.asciidoc new file mode 100644 index 000000000000..7523b29c62b1 --- /dev/null +++ b/docs/reference/esql/functions/types/term.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | query | result +keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean +text | text | boolean +|=== diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index 9c6f0592a1d9..e079b9d66529 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -735,3 +735,70 @@ You can also specify the target field as follows: In this case, {feat-imp} is exposed in the `my_field.foo.feature_importance` field. + + +[discrete] +[[inference-processor-examples]] +==== {infer-cap} processor examples + +The following example uses an <> in an {infer} processor named `query_helper_pipeline` to perform a chat completion task. +The processor generates an {es} query from natural language input using a prompt designed for a completion task type. +Refer to <> for the {infer} service you use and check the corresponding examples of setting up an endpoint with the chat completion task type. 
+ + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/query_helper_pipeline +{ + "processors": [ + { + "script": { + "source": "ctx.prompt = 'Please generate an elasticsearch search query on index `articles_index` for the following natural language query. Dates are in the field `@timestamp`, document types are in the field `type` (options are `news`, `publication`), categories in the field `category` and can be multiple (options are `medicine`, `pharmaceuticals`, `technology`), and document names are in the field `title` which should use a fuzzy match. Ignore fields which cannot be determined from the natural language query context: ' + ctx.content" <1> + } + }, + { + "inference": { + "model_id": "openai_chat_completions", <2> + "input_output": { + "input_field": "prompt", + "output_field": "query" + } + } + }, + { + "remove": { + "field": "prompt" + } + } + ] +} +-------------------------------------------------- +// TEST[skip: An inference endpoint is required.] +<1> The `prompt` field contains the prompt used for the completion task, created with <>. +`+ ctx.content` appends the natural language input to the prompt. +<2> The ID of the pre-configured {infer} endpoint, which utilizes the <> with the `completion` task type. + +The following API request will simulate running a document through the ingest pipeline created previously: + +[source,console] +-------------------------------------------------- +POST _ingest/pipeline/query_helper_pipeline/_simulate +{ + "docs": [ + { + "_source": { + "content": "artificial intelligence in medicine articles published in the last 12 months" <1> + } + } + ] +} +-------------------------------------------------- +// TEST[skip: An inference processor with an inference endpoint is required.] +<1> The natural language query used to generate an {es} query within the prompt created by the {infer} processor. 
+ + +[discrete] +[[infer-proc-readings]] +==== Further readings + +* https://www.elastic.co/search-labs/blog/openwebcrawler-llms-semantic-text-resume-job-search[Which job is the best for you? Using LLMs and semantic_text to match resumes to jobs] \ No newline at end of file diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 37178fd9439d..9189d2a27f3f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -4383,11 +4383,6 @@ - - - - - @@ -4408,9 +4403,9 @@ - - - + + + @@ -4433,11 +4428,6 @@ - - - - - @@ -4478,11 +4468,6 @@ - - - - - @@ -4493,11 +4478,11 @@ - - - - - + + + + + diff --git a/libs/entitlement/asm-provider/build.gradle b/libs/entitlement/asm-provider/build.gradle index 5f968629fe55..dcec0579a5ba 100644 --- a/libs/entitlement/asm-provider/build.gradle +++ b/libs/entitlement/asm-provider/build.gradle @@ -11,10 +11,10 @@ apply plugin: 'elasticsearch.build' dependencies { compileOnly project(':libs:entitlement') - implementation 'org.ow2.asm:asm:9.7' + implementation 'org.ow2.asm:asm:9.7.1' testImplementation project(":test:framework") testImplementation project(":libs:entitlement:bridge") - testImplementation 'org.ow2.asm:asm-util:9.7' + testImplementation 'org.ow2.asm:asm-util:9.7.1' } tasks.named('test').configure { diff --git a/libs/entitlement/entitlements-loading.svg b/libs/entitlement/entitlements-loading.svg new file mode 100644 index 000000000000..4f0213b853be --- /dev/null +++ b/libs/entitlement/entitlements-loading.svg @@ -0,0 +1,4 @@ + + + +
ES main
ES main
Boot Loader
Boot Loader
Platform Loader
Platform Loader
System Loader
System Loader
reflection
reflection
Agent Jar
Agent Jar
Server
Server
(Instrumented)
JDK classes
(Instrumented)...
agent main
(in unnamed module)
agent main...
entitlements ready
entitlements ready
reflection
reflection
Bridge
(patched into java.base)
Bridge...
Entitlements
Entitlements
Entitlements bootstrap
Entitlements bootstrap
  • Grant access to unnamed module
  • Set (static, protected) init arguments
  • Load agent
Grant access to unnamed modu...
(reflectively) call 
entitlements init
with Instrumentation
(reflectively) call...
Entitlements init
Entitlements init
  • Load plugin policies
  • Load server policy
  • Create entitlements manager
    • Policies
    • Method to lookup plugin by Module
  • Set entitlements manager in static (accessible by bridge)
  • Instrument jdk classes
  • run self test (force bridge to capture entitlements manager)
Load plugin policiesLoad server policyCreate e...
Text is not SVG - cannot display
\ No newline at end of file diff --git a/libs/entitlement/tools/securitymanager-scanner/build.gradle b/libs/entitlement/tools/securitymanager-scanner/build.gradle index 8d035c9e847c..ebb671e5487e 100644 --- a/libs/entitlement/tools/securitymanager-scanner/build.gradle +++ b/libs/entitlement/tools/securitymanager-scanner/build.gradle @@ -47,8 +47,8 @@ repositories { dependencies { compileOnly(project(':libs:core')) - implementation 'org.ow2.asm:asm:9.7' - implementation 'org.ow2.asm:asm-util:9.7' + implementation 'org.ow2.asm:asm:9.7.1' + implementation 'org.ow2.asm:asm-util:9.7.1' implementation(project(':libs:entitlement:tools:common')) } diff --git a/libs/plugin-scanner/build.gradle b/libs/plugin-scanner/build.gradle index d04af0624b3b..44e6853140a5 100644 --- a/libs/plugin-scanner/build.gradle +++ b/libs/plugin-scanner/build.gradle @@ -20,8 +20,8 @@ dependencies { api project(':libs:plugin-api') api project(":libs:x-content") - api 'org.ow2.asm:asm:9.7' - api 'org.ow2.asm:asm-tree:9.7' + api 'org.ow2.asm:asm:9.7.1' + api 'org.ow2.asm:asm-tree:9.7.1' testImplementation "junit:junit:${versions.junit}" testImplementation(project(":test:framework")) { diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java index d42c56845d03..38ef8bc2e4ef 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java @@ -108,7 +108,11 @@ public String text() throws IOException { if (currentToken().isValue() == false) { throwOnNoText(); } - return parser.getText(); + try { + return parser.getText(); + } catch (JsonParseException e) { + throw newXContentParseException(e); + } } private void throwOnNoText() { diff --git a/muted-tests.yml b/muted-tests.yml index 
b8d82f00bc43..99d1e170a4f4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -2,12 +2,6 @@ tests: - class: "org.elasticsearch.client.RestClientSingleHostIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/102717" method: "testRequestResetAndAbort" -- class: org.elasticsearch.xpack.restart.FullClusterRestartIT - method: testSingleDoc {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111434 -- class: org.elasticsearch.xpack.restart.FullClusterRestartIT - method: testDataStreams {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/111448 - class: org.elasticsearch.smoketest.WatcherYamlRestIT method: test {p0=watcher/usage/10_basic/Test watcher usage stats output} issue: https://github.com/elastic/elasticsearch/issues/112189 @@ -103,9 +97,6 @@ tests: - class: org.elasticsearch.search.StressSearchServiceReaperIT method: testStressReaper issue: https://github.com/elastic/elasticsearch/issues/115816 -- class: org.elasticsearch.search.SearchServiceTests - method: testParseSourceValidation - issue: https://github.com/elastic/elasticsearch/issues/115936 - class: org.elasticsearch.xpack.application.connector.ConnectorIndexServiceTests issue: https://github.com/elastic/elasticsearch/issues/116087 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT @@ -254,6 +245,14 @@ tests: - class: org.elasticsearch.packaging.test.ArchiveTests method: test40AutoconfigurationNotTriggeredWhenNodeIsMeantToJoinExistingCluster issue: https://github.com/elastic/elasticsearch/issues/118029 +- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT + issue: https://github.com/elastic/elasticsearch/issues/117981 +- class: org.elasticsearch.packaging.test.ConfigurationTests + method: test30SymlinkedDataPath + issue: https://github.com/elastic/elasticsearch/issues/118111 +- class: org.elasticsearch.datastreams.ResolveClusterDataStreamIT + method: testClusterResolveWithDataStreamsUsingAlias + issue: 
https://github.com/elastic/elasticsearch/issues/118124 # Examples: # diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java index 369d0824bdb2..3faa88339f0a 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/BulkRestIT.java @@ -74,8 +74,7 @@ public void testBulkInvalidIndexNameString() throws IOException { ResponseException responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(BAD_REQUEST.getStatus())); - assertThat(responseException.getMessage(), containsString("could not parse bulk request body")); - assertThat(responseException.getMessage(), containsString("json_parse_exception")); + assertThat(responseException.getMessage(), containsString("x_content_parse_exception")); assertThat(responseException.getMessage(), containsString("Invalid UTF-8")); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index a09fcbd0c527..7e4d23db7028 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; @@ -40,9 +39,6 @@ */ @ServerlessScope(Scope.PUBLIC) public class 
RestGetSourceAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetSourceAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source " - + "requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index 65aa1869a41e..9d39bf7f343c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -28,8 +27,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestMultiTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiTermVectorsAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in multi term vector requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index c1a55874bfc5..b0e08b376f9d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.IndicesOptions; 
import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestCountAction extends BaseRestHandler { - private final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCountAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in count requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index ff062084a3cb..a9c2ff7576b0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.features.NodeFeature; @@ -56,8 +55,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestSearchAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSearchAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in search requests is deprecated."; /** * Indicates whether hits.total should be rendered as an integer or an object diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java 
b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 098a2b2f45d2..3554a6dc08b9 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; @@ -92,7 +91,6 @@ * @see SearchRequest#source(SearchSourceBuilder) */ public final class SearchSourceBuilder implements Writeable, ToXContentObject, Rewriteable { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(SearchSourceBuilder.class); public static final ParseField FROM_FIELD = new ParseField("from"); public static final ParseField SIZE_FIELD = new ParseField("size"); diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 6640f0f85840..2aaade35fb8f 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.fielddata.FieldData; @@ -67,7 +66,6 @@ * A geo distance based sorting on a geo point like field. 
*/ public class GeoDistanceSortBuilder extends SortBuilder { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoDistanceSortBuilder.class); public static final String NAME = "_geo_distance"; public static final String ALTERNATIVE_NAME = "_geoDistance"; diff --git a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index 0b35a3cc23c1..5e10a7d37aea 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -179,8 +179,7 @@ public void testParseVersion() { } public void testAllVersionsMatchId() throws Exception { - final Set releasedVersions = new HashSet<>(VersionUtils.allReleasedVersions()); - final Set unreleasedVersions = new HashSet<>(VersionUtils.allUnreleasedVersions()); + final Set versions = new HashSet<>(VersionUtils.allVersions()); Map maxBranchVersions = new HashMap<>(); for (java.lang.reflect.Field field : Version.class.getFields()) { if (field.getName().matches("_ID")) { @@ -195,43 +194,15 @@ public void testAllVersionsMatchId() throws Exception { Version v = (Version) versionConstant.get(null); logger.debug("Checking {}", v); - if (field.getName().endsWith("_UNRELEASED")) { - assertTrue(unreleasedVersions.contains(v)); - } else { - assertTrue(releasedVersions.contains(v)); - } + assertTrue(versions.contains(v)); assertEquals("Version id " + field.getName() + " does not point to " + constantName, v, Version.fromId(versionId)); assertEquals("Version " + constantName + " does not have correct id", versionId, v.id); String number = v.toString(); assertEquals("V_" + number.replace('.', '_'), constantName); - - // only the latest version for a branch should be a snapshot (ie unreleased) - String branchName = "" + v.major + "." 
+ v.minor; - Version maxBranchVersion = maxBranchVersions.get(branchName); - if (maxBranchVersion == null) { - maxBranchVersions.put(branchName, v); - } else if (v.after(maxBranchVersion)) { - if (v == Version.CURRENT) { - // Current is weird - it counts as released even though it shouldn't. - continue; - } - assertFalse( - "Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", - VersionUtils.allUnreleasedVersions().contains(maxBranchVersion) - ); - maxBranchVersions.put(branchName, v); - } } } } - public static void assertUnknownVersion(Version version) { - assertFalse( - "Version " + version + " has been releaed don't use a new instance of this version", - VersionUtils.allReleasedVersions().contains(version) - ); - } - public void testIsCompatible() { assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion())); assertFalse(isCompatible(Version.V_7_0_0, Version.V_8_0_0)); @@ -279,14 +250,6 @@ public boolean isCompatible(Version left, Version right) { return result; } - // This exists because 5.1.0 was never released due to a mistake in the release process. - // This verifies that we never declare the version as "released" accidentally. - // It would never pass qa tests later on, but those come very far in the build and this is quick to check now. 
- public void testUnreleasedVersion() { - Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); - VersionTests.assertUnknownVersion(VERSION_5_1_0_UNRELEASED); - } - public void testIllegalMinorAndPatchNumbers() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("8.2.999")); assertThat( diff --git a/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java b/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java index 331f0f7ad13e..798b576500d7 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/PluginsResolverTests.java @@ -136,25 +136,28 @@ public void testResolveMultipleNonModularPlugins() throws IOException, ClassNotF Path jar1 = createNonModularPluginJar(home, "plugin1", "p", "A"); Path jar2 = createNonModularPluginJar(home, "plugin2", "q", "B"); - var loader1 = createClassLoader(jar1); - var loader2 = createClassLoader(jar2); - - PluginBundle bundle1 = createMockBundle("plugin1", null, "p.A"); - PluginBundle bundle2 = createMockBundle("plugin2", null, "q.B"); - PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); - - when(mockPluginsLoader.pluginLayers()).thenReturn( - Stream.of(new TestPluginLayer(bundle1, loader1, ModuleLayer.boot()), new TestPluginLayer(bundle2, loader2, ModuleLayer.boot())) - ); - PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); - - var testClass1 = loader1.loadClass("p.A"); - var testClass2 = loader2.loadClass("q.B"); - var resolvedPluginName1 = pluginsResolver.resolveClassToPluginName(testClass1); - var resolvedPluginName2 = pluginsResolver.resolveClassToPluginName(testClass2); - - assertEquals("plugin1", resolvedPluginName1); - assertEquals("plugin2", resolvedPluginName2); + try (var loader1 = createClassLoader(jar1); var loader2 = createClassLoader(jar2)) { + + PluginBundle bundle1 = createMockBundle("plugin1", 
null, "p.A"); + PluginBundle bundle2 = createMockBundle("plugin2", null, "q.B"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + + when(mockPluginsLoader.pluginLayers()).thenReturn( + Stream.of( + new TestPluginLayer(bundle1, loader1, ModuleLayer.boot()), + new TestPluginLayer(bundle2, loader2, ModuleLayer.boot()) + ) + ); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + + var testClass1 = loader1.loadClass("p.A"); + var testClass2 = loader2.loadClass("q.B"); + var resolvedPluginName1 = pluginsResolver.resolveClassToPluginName(testClass1); + var resolvedPluginName2 = pluginsResolver.resolveClassToPluginName(testClass2); + + assertEquals("plugin1", resolvedPluginName1); + assertEquals("plugin2", resolvedPluginName2); + } } public void testResolveNonModularPlugin() throws IOException, ClassNotFoundException { @@ -164,22 +167,22 @@ public void testResolveNonModularPlugin() throws IOException, ClassNotFoundExcep Path jar = createNonModularPluginJar(home, pluginName, "p", "A"); - var loader = createClassLoader(jar); - - PluginBundle bundle = createMockBundle(pluginName, null, "p.A"); - PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); + try (var loader = createClassLoader(jar)) { + PluginBundle bundle = createMockBundle(pluginName, null, "p.A"); + PluginsLoader mockPluginsLoader = mock(PluginsLoader.class); - when(mockPluginsLoader.pluginLayers()).thenReturn(Stream.of(new TestPluginLayer(bundle, loader, ModuleLayer.boot()))); - PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); + when(mockPluginsLoader.pluginLayers()).thenReturn(Stream.of(new TestPluginLayer(bundle, loader, ModuleLayer.boot()))); + PluginsResolver pluginsResolver = PluginsResolver.create(mockPluginsLoader); - var testClass = loader.loadClass("p.A"); - var resolvedPluginName = pluginsResolver.resolveClassToPluginName(testClass); - var unresolvedPluginName1 = 
pluginsResolver.resolveClassToPluginName(PluginsResolver.class); - var unresolvedPluginName2 = pluginsResolver.resolveClassToPluginName(String.class); + var testClass = loader.loadClass("p.A"); + var resolvedPluginName = pluginsResolver.resolveClassToPluginName(testClass); + var unresolvedPluginName1 = pluginsResolver.resolveClassToPluginName(PluginsResolver.class); + var unresolvedPluginName2 = pluginsResolver.resolveClassToPluginName(String.class); - assertEquals(pluginName, resolvedPluginName); - assertNull(unresolvedPluginName1); - assertNull(unresolvedPluginName2); + assertEquals(pluginName, resolvedPluginName); + assertNull(unresolvedPluginName1); + assertNull(unresolvedPluginName2); + } } private static URLClassLoader createClassLoader(Path jar) throws MalformedURLException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java index 55f6cc5498d8..4135ead545e0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/json/JsonXContentTests.java @@ -11,6 +11,9 @@ import org.elasticsearch.common.xcontent.BaseXContentTestCase; import org.elasticsearch.xcontent.XContentGenerator; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -28,4 +31,14 @@ public void testBigInteger() throws Exception { XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os); doTestBigInteger(generator, os); } + + public void testMalformedJsonFieldThrowsXContentException() throws Exception { + String json = "{\"test\":\"/*/}"; + try (XContentParser parser = 
JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + assertThrows(XContentParseException.class, () -> parser.text()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index d1ccfcbe7873..89fd25f638e1 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -95,7 +95,6 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; @@ -124,7 +123,6 @@ import org.elasticsearch.search.rank.feature.RankFeatureResult; import org.elasticsearch.search.rank.feature.RankFeatureShardRequest; import org.elasticsearch.search.rank.feature.RankFeatureShardResult; -import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.tasks.TaskCancelHelper; import org.elasticsearch.tasks.TaskCancelledException; @@ -2930,119 +2928,6 @@ public void testSlicingBehaviourForParallelCollection() throws Exception { } } - /** - * This method tests validation that happens on the data nodes, which is now performed on the coordinating node. - * We still need the validation to cover for mixed cluster scenarios where the coordinating node does not perform the check yet. 
- */ - public void testParseSourceValidation() { - String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); - IndexService indexService = createIndex(index); - final SearchService service = getInstanceFromNode(SearchService.class); - { - // scroll and search_after - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.scroll(new TimeValue(1000)); - searchRequest.source().searchAfter(new String[] { "value" }); - assertCreateContextValidation(searchRequest, "`search_after` cannot be used in a scroll context.", indexService, service); - } - { - // scroll and collapse - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.scroll(new TimeValue(1000)); - searchRequest.source().collapse(new CollapseBuilder("field")); - assertCreateContextValidation(searchRequest, "cannot use `collapse` in a scroll context", indexService, service); - } - { - // search_after and `from` isn't valid - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().searchAfter(new String[] { "value" }); - searchRequest.source().from(10); - assertCreateContextValidation( - searchRequest, - "`from` parameter must be set to 0 when `search_after` is used", - indexService, - service - ); - } - { - // slice without scroll or pit - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().slice(new SliceBuilder(1, 10)); - assertCreateContextValidation( - searchRequest, - "[slice] can only be used with [scroll] or [point-in-time] requests", - indexService, - service - ); - } - { - // stored fields disabled with _source requested - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().storedField("_none_"); - searchRequest.source().fetchSource(true); - assertCreateContextValidation( - searchRequest, - "[stored_fields] cannot be disabled if 
[_source] is requested", - indexService, - service - ); - } - { - // stored fields disabled with fetch fields requested - SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); - searchRequest.source().storedField("_none_"); - searchRequest.source().fetchSource(false); - searchRequest.source().fetchField("field"); - assertCreateContextValidation( - searchRequest, - "[stored_fields] cannot be disabled when using the [fields] option", - indexService, - service - ); - } - } - - private static void assertCreateContextValidation( - SearchRequest searchRequest, - String errorMessage, - IndexService indexService, - SearchService searchService - ) { - ShardId shardId = new ShardId(indexService.index(), 0); - long nowInMillis = System.currentTimeMillis(); - String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10); - searchRequest.allowPartialSearchResults(randomBoolean()); - ShardSearchRequest request = new ShardSearchRequest( - OriginalIndices.NONE, - searchRequest, - shardId, - 0, - indexService.numberOfShards(), - AliasFilter.EMPTY, - 1f, - nowInMillis, - clusterAlias - ); - - SearchShardTask task = new SearchShardTask(1, "type", "action", "description", null, emptyMap()); - - ReaderContext readerContext = null; - try { - ReaderContext createOrGetReaderContext = searchService.createOrGetReaderContext(request); - readerContext = createOrGetReaderContext; - IllegalArgumentException exception = expectThrows( - IllegalArgumentException.class, - () -> searchService.createContext(createOrGetReaderContext, request, task, ResultsType.QUERY, randomBoolean()) - ); - assertThat(exception.getMessage(), containsString(errorMessage)); - } finally { - if (readerContext != null) { - readerContext.close(); - searchService.freeReaderContext(readerContext.id()); - } - } - } - private static ReaderContext createReaderContext(IndexService indexService, IndexShard indexShard) { return new ReaderContext( new 
ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong()), diff --git a/settings.gradle b/settings.gradle index 4722fc311480..747fbb3e439f 100644 --- a/settings.gradle +++ b/settings.gradle @@ -73,6 +73,7 @@ List projects = [ 'distribution:packages:aarch64-rpm', 'distribution:packages:rpm', 'distribution:bwc:bugfix', + 'distribution:bwc:bugfix2', 'distribution:bwc:maintenance', 'distribution:bwc:minor', 'distribution:bwc:staged', diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 126b95041da1..c7e08eb3cdfa 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -86,7 +86,6 @@ tasks.named("thirdPartyAudit").configure { tasks.named("test").configure { systemProperty 'tests.gradle_index_compat_versions', buildParams.bwcVersions.indexCompatible.join(',') systemProperty 'tests.gradle_wire_compat_versions', buildParams.bwcVersions.wireCompatible.join(',') - systemProperty 'tests.gradle_unreleased_versions', buildParams.bwcVersions.unreleased.join(',') } tasks.register("integTest", Test) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index d561c5512b61..8b7ab620774b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -12,132 +12,15 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.Random; -import java.util.stream.Collectors; -import java.util.stream.Stream; /** Utilities for selecting versions in tests */ public class VersionUtils { - /** - * Sort versions that have backwards compatibility guarantees from - * those that don't. 
Doesn't actually check whether or not the versions - * are released, instead it relies on gradle to have already checked - * this which it does in {@code :core:verifyVersions}. So long as the - * rules here match up with the rules in gradle then this should - * produce sensible results. - * @return a tuple containing versions with backwards compatibility - * guarantees in v1 and versions without the guranteees in v2 - */ - static Tuple, List> resolveReleasedVersions(Version current, Class versionClass) { - // group versions into major version - Map> majorVersions = Version.getDeclaredVersions(versionClass) - .stream() - .collect(Collectors.groupingBy(v -> (int) v.major)); - // this breaks b/c 5.x is still in version list but master doesn't care about it! - // assert majorVersions.size() == 2; - // TODO: remove oldVersions, we should only ever have 2 majors in Version - List> oldVersions = splitByMinor(majorVersions.getOrDefault((int) current.major - 2, Collections.emptyList())); - List> previousMajor = splitByMinor(majorVersions.get((int) current.major - 1)); - List> currentMajor = splitByMinor(majorVersions.get((int) current.major)); - - List unreleasedVersions = new ArrayList<>(); - final List> stableVersions; - if (currentMajor.size() == 1) { - // on master branch - stableVersions = previousMajor; - // remove current - moveLastToUnreleased(currentMajor, unreleasedVersions); - } else { - // on a stable or release branch, ie N.x - stableVersions = currentMajor; - // remove the next maintenance bugfix - moveLastToUnreleased(previousMajor, unreleasedVersions); - } - - // remove next minor - Version lastMinor = moveLastToUnreleased(stableVersions, unreleasedVersions); - if (lastMinor.revision == 0) { - if (stableVersions.get(stableVersions.size() - 1).size() == 1) { - // a minor is being staged, which is also unreleased - moveLastToUnreleased(stableVersions, unreleasedVersions); - } - // remove the next bugfix - if (stableVersions.isEmpty() == false) { - 
moveLastToUnreleased(stableVersions, unreleasedVersions); - } - } - - // If none of the previous major was released, then the last minor and bugfix of the old version was not released either. - if (previousMajor.isEmpty()) { - assert currentMajor.isEmpty() : currentMajor; - // minor of the old version is being staged - moveLastToUnreleased(oldVersions, unreleasedVersions); - // bugix of the old version is also being staged - moveLastToUnreleased(oldVersions, unreleasedVersions); - } - List releasedVersions = Stream.of(oldVersions, previousMajor, currentMajor) - .flatMap(List::stream) - .flatMap(List::stream) - .collect(Collectors.toList()); - Collections.sort(unreleasedVersions); // we add unreleased out of order, so need to sort here - return new Tuple<>(Collections.unmodifiableList(releasedVersions), Collections.unmodifiableList(unreleasedVersions)); - } - - // split the given versions into sub lists grouped by minor version - private static List> splitByMinor(List versions) { - Map> byMinor = versions.stream().collect(Collectors.groupingBy(v -> (int) v.minor)); - return byMinor.entrySet().stream().sorted(Map.Entry.comparingByKey()).map(Map.Entry::getValue).collect(Collectors.toList()); - } - - // move the last version of the last minor in versions to the unreleased versions - private static Version moveLastToUnreleased(List> versions, List unreleasedVersions) { - List lastMinor = new ArrayList<>(versions.get(versions.size() - 1)); - Version lastVersion = lastMinor.remove(lastMinor.size() - 1); - if (lastMinor.isEmpty()) { - versions.remove(versions.size() - 1); - } else { - versions.set(versions.size() - 1, lastMinor); - } - unreleasedVersions.add(lastVersion); - return lastVersion; - } - - private static final List RELEASED_VERSIONS; - private static final List UNRELEASED_VERSIONS; - private static final List ALL_VERSIONS; - - static { - Tuple, List> versions = resolveReleasedVersions(Version.CURRENT, Version.class); - RELEASED_VERSIONS = versions.v1(); - 
UNRELEASED_VERSIONS = versions.v2(); - List allVersions = new ArrayList<>(RELEASED_VERSIONS.size() + UNRELEASED_VERSIONS.size()); - allVersions.addAll(RELEASED_VERSIONS); - allVersions.addAll(UNRELEASED_VERSIONS); - Collections.sort(allVersions); - ALL_VERSIONS = Collections.unmodifiableList(allVersions); - } - - /** - * Returns an immutable, sorted list containing all released versions. - */ - public static List allReleasedVersions() { - return RELEASED_VERSIONS; - } - - /** - * Returns an immutable, sorted list containing all unreleased versions. - */ - public static List allUnreleasedVersions() { - return UNRELEASED_VERSIONS; - } + private static final List ALL_VERSIONS = Version.getDeclaredVersions(Version.class); /** * Returns an immutable, sorted list containing all versions, both released and unreleased. @@ -147,16 +30,16 @@ public static List allVersions() { } /** - * Get the released version before {@code version}. + * Get the version before {@code version}. */ public static Version getPreviousVersion(Version version) { - for (int i = RELEASED_VERSIONS.size() - 1; i >= 0; i--) { - Version v = RELEASED_VERSIONS.get(i); + for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { + Version v = ALL_VERSIONS.get(i); if (v.before(version)) { return v; } } - throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]"); + throw new IllegalArgumentException("couldn't find any versions before [" + version + "]"); } /** @@ -169,22 +52,22 @@ public static Version getPreviousVersion() { } /** - * Returns the released {@link Version} before the {@link Version#CURRENT} + * Returns the {@link Version} before the {@link Version#CURRENT} * where the minor version is less than the currents minor version. 
*/ public static Version getPreviousMinorVersion() { - for (int i = RELEASED_VERSIONS.size() - 1; i >= 0; i--) { - Version v = RELEASED_VERSIONS.get(i); + for (int i = ALL_VERSIONS.size() - 1; i >= 0; i--) { + Version v = ALL_VERSIONS.get(i); if (v.minor < Version.CURRENT.minor || v.major < Version.CURRENT.major) { return v; } } - throw new IllegalArgumentException("couldn't find any released versions of the minor before [" + Build.current().version() + "]"); + throw new IllegalArgumentException("couldn't find any versions of the minor before [" + Build.current().version() + "]"); } - /** Returns the oldest released {@link Version} */ + /** Returns the oldest {@link Version} */ public static Version getFirstVersion() { - return RELEASED_VERSIONS.get(0); + return ALL_VERSIONS.get(0); } /** Returns a random {@link Version} from all available versions. */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b4f4243fb90f..4428afaaeabe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -333,8 +333,11 @@ public void initClient() throws IOException { assert testFeatureServiceInitialized() == false; clusterHosts = parseClusterHosts(getTestRestCluster()); logger.info("initializing REST clients against {}", clusterHosts); - client = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); - adminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); + var clientSettings = restClientSettings(); + var adminSettings = restAdminSettings(); + var hosts = clusterHosts.toArray(new HttpHost[0]); + client = buildClient(clientSettings, hosts); + adminClient = clientSettings.equals(adminSettings) ? 
client : buildClient(adminSettings, hosts); availableFeatures = EnumSet.of(ProductFeature.LEGACY_TEMPLATES); Set versions = new HashSet<>(); diff --git a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java index e0013e06f324..5ae7e5640fc9 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java @@ -9,19 +9,11 @@ package org.elasticsearch.test; import org.elasticsearch.Version; -import org.elasticsearch.core.Booleans; -import org.elasticsearch.core.Tuple; import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; -import java.util.Set; import static org.elasticsearch.Version.fromId; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * Tests VersionUtils. 
Note: this test should remain unchanged across major versions @@ -30,7 +22,7 @@ public class VersionUtilsTests extends ESTestCase { public void testAllVersionsSorted() { - List allVersions = VersionUtils.allReleasedVersions(); + List allVersions = VersionUtils.allVersions(); for (int i = 0, j = 1; j < allVersions.size(); ++i, ++j) { assertTrue(allVersions.get(i).before(allVersions.get(j))); } @@ -58,9 +50,9 @@ public void testRandomVersionBetween() { got = VersionUtils.randomVersionBetween(random(), null, fromId(7000099)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(fromId(7000099))); - got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0)); + got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0))); + assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0))); // unbounded upper got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null); @@ -83,265 +75,34 @@ public void testRandomVersionBetween() { assertEquals(got, VersionUtils.getFirstVersion()); got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, null); assertEquals(got, Version.CURRENT); - - if (Booleans.parseBoolean(System.getProperty("build.snapshot", "true"))) { - // max or min can be an unreleased version - final Version unreleased = randomFrom(VersionUtils.allUnreleasedVersions()); - assertThat(VersionUtils.randomVersionBetween(random(), null, unreleased), lessThanOrEqualTo(unreleased)); - assertThat(VersionUtils.randomVersionBetween(random(), unreleased, null), greaterThanOrEqualTo(unreleased)); - assertEquals(unreleased, VersionUtils.randomVersionBetween(random(), unreleased, unreleased)); - } - } - - public static class TestReleaseBranch { - public static final Version V_4_0_0 = Version.fromString("4.0.0"); - 
public static final Version V_4_0_1 = Version.fromString("4.0.1"); - public static final Version V_5_3_0 = Version.fromString("5.3.0"); - public static final Version V_5_3_1 = Version.fromString("5.3.1"); - public static final Version V_5_3_2 = Version.fromString("5.3.2"); - public static final Version V_5_4_0 = Version.fromString("5.4.0"); - public static final Version V_5_4_1 = Version.fromString("5.4.1"); - public static final Version CURRENT = V_5_4_1; - } - - public void testResolveReleasedVersionsForReleaseBranch() { - Tuple, List> t = VersionUtils.resolveReleasedVersions(TestReleaseBranch.CURRENT, TestReleaseBranch.class); - List released = t.v1(); - List unreleased = t.v2(); - - assertThat( - released, - equalTo( - Arrays.asList( - TestReleaseBranch.V_4_0_0, - TestReleaseBranch.V_5_3_0, - TestReleaseBranch.V_5_3_1, - TestReleaseBranch.V_5_3_2, - TestReleaseBranch.V_5_4_0 - ) - ) - ); - assertThat(unreleased, equalTo(Arrays.asList(TestReleaseBranch.V_4_0_1, TestReleaseBranch.V_5_4_1))); - } - - public static class TestStableBranch { - public static final Version V_4_0_0 = Version.fromString("4.0.0"); - public static final Version V_4_0_1 = Version.fromString("4.0.1"); - public static final Version V_5_0_0 = Version.fromString("5.0.0"); - public static final Version V_5_0_1 = Version.fromString("5.0.1"); - public static final Version V_5_0_2 = Version.fromString("5.0.2"); - public static final Version V_5_1_0 = Version.fromString("5.1.0"); - public static final Version CURRENT = V_5_1_0; - } - - public void testResolveReleasedVersionsForUnreleasedStableBranch() { - Tuple, List> t = VersionUtils.resolveReleasedVersions(TestStableBranch.CURRENT, TestStableBranch.class); - List released = t.v1(); - List unreleased = t.v2(); - - assertThat(released, equalTo(Arrays.asList(TestStableBranch.V_4_0_0, TestStableBranch.V_5_0_0, TestStableBranch.V_5_0_1))); - assertThat(unreleased, equalTo(Arrays.asList(TestStableBranch.V_4_0_1, TestStableBranch.V_5_0_2, 
TestStableBranch.V_5_1_0))); - } - - public static class TestStableBranchBehindStableBranch { - public static final Version V_4_0_0 = Version.fromString("4.0.0"); - public static final Version V_4_0_1 = Version.fromString("4.0.1"); - public static final Version V_5_3_0 = Version.fromString("5.3.0"); - public static final Version V_5_3_1 = Version.fromString("5.3.1"); - public static final Version V_5_3_2 = Version.fromString("5.3.2"); - public static final Version V_5_4_0 = Version.fromString("5.4.0"); - public static final Version V_5_5_0 = Version.fromString("5.5.0"); - public static final Version CURRENT = V_5_5_0; - } - - public void testResolveReleasedVersionsForStableBranchBehindStableBranch() { - Tuple, List> t = VersionUtils.resolveReleasedVersions( - TestStableBranchBehindStableBranch.CURRENT, - TestStableBranchBehindStableBranch.class - ); - List released = t.v1(); - List unreleased = t.v2(); - - assertThat( - released, - equalTo( - Arrays.asList( - TestStableBranchBehindStableBranch.V_4_0_0, - TestStableBranchBehindStableBranch.V_5_3_0, - TestStableBranchBehindStableBranch.V_5_3_1 - ) - ) - ); - assertThat( - unreleased, - equalTo( - Arrays.asList( - TestStableBranchBehindStableBranch.V_4_0_1, - TestStableBranchBehindStableBranch.V_5_3_2, - TestStableBranchBehindStableBranch.V_5_4_0, - TestStableBranchBehindStableBranch.V_5_5_0 - ) - ) - ); - } - - public static class TestUnstableBranch { - public static final Version V_5_3_0 = Version.fromString("5.3.0"); - public static final Version V_5_3_1 = Version.fromString("5.3.1"); - public static final Version V_5_3_2 = Version.fromString("5.3.2"); - public static final Version V_5_4_0 = Version.fromString("5.4.0"); - public static final Version V_6_0_0 = Version.fromString("6.0.0"); - public static final Version CURRENT = V_6_0_0; - } - - public void testResolveReleasedVersionsForUnstableBranch() { - Tuple, List> t = VersionUtils.resolveReleasedVersions(TestUnstableBranch.CURRENT, TestUnstableBranch.class); - 
List released = t.v1(); - List unreleased = t.v2(); - - assertThat(released, equalTo(Arrays.asList(TestUnstableBranch.V_5_3_0, TestUnstableBranch.V_5_3_1))); - assertThat(unreleased, equalTo(Arrays.asList(TestUnstableBranch.V_5_3_2, TestUnstableBranch.V_5_4_0, TestUnstableBranch.V_6_0_0))); - } - - public static class TestNewMajorRelease { - public static final Version V_5_6_0 = Version.fromString("5.6.0"); - public static final Version V_5_6_1 = Version.fromString("5.6.1"); - public static final Version V_5_6_2 = Version.fromString("5.6.2"); - public static final Version V_6_0_0 = Version.fromString("6.0.0"); - public static final Version V_6_0_1 = Version.fromString("6.0.1"); - public static final Version CURRENT = V_6_0_1; - } - - public void testResolveReleasedVersionsAtNewMajorRelease() { - Tuple, List> t = VersionUtils.resolveReleasedVersions( - TestNewMajorRelease.CURRENT, - TestNewMajorRelease.class - ); - List released = t.v1(); - List unreleased = t.v2(); - - assertThat(released, equalTo(Arrays.asList(TestNewMajorRelease.V_5_6_0, TestNewMajorRelease.V_5_6_1, TestNewMajorRelease.V_6_0_0))); - assertThat(unreleased, equalTo(Arrays.asList(TestNewMajorRelease.V_5_6_2, TestNewMajorRelease.V_6_0_1))); - } - - public static class TestVersionBumpIn6x { - public static final Version V_5_6_0 = Version.fromString("5.6.0"); - public static final Version V_5_6_1 = Version.fromString("5.6.1"); - public static final Version V_5_6_2 = Version.fromString("5.6.2"); - public static final Version V_6_0_0 = Version.fromString("6.0.0"); - public static final Version V_6_0_1 = Version.fromString("6.0.1"); - public static final Version V_6_1_0 = Version.fromString("6.1.0"); - public static final Version CURRENT = V_6_1_0; - } - - public void testResolveReleasedVersionsAtVersionBumpIn6x() { - Tuple, List> t = VersionUtils.resolveReleasedVersions( - TestVersionBumpIn6x.CURRENT, - TestVersionBumpIn6x.class - ); - List released = t.v1(); - List unreleased = t.v2(); - - 
assertThat(released, equalTo(Arrays.asList(TestVersionBumpIn6x.V_5_6_0, TestVersionBumpIn6x.V_5_6_1, TestVersionBumpIn6x.V_6_0_0))); - assertThat( - unreleased, - equalTo(Arrays.asList(TestVersionBumpIn6x.V_5_6_2, TestVersionBumpIn6x.V_6_0_1, TestVersionBumpIn6x.V_6_1_0)) - ); - } - - public static class TestNewMinorBranchIn6x { - public static final Version V_5_6_0 = Version.fromString("5.6.0"); - public static final Version V_5_6_1 = Version.fromString("5.6.1"); - public static final Version V_5_6_2 = Version.fromString("5.6.2"); - public static final Version V_6_0_0 = Version.fromString("6.0.0"); - public static final Version V_6_0_1 = Version.fromString("6.0.1"); - public static final Version V_6_1_0 = Version.fromString("6.1.0"); - public static final Version V_6_1_1 = Version.fromString("6.1.1"); - public static final Version V_6_1_2 = Version.fromString("6.1.2"); - public static final Version V_6_2_0 = Version.fromString("6.2.0"); - public static final Version CURRENT = V_6_2_0; - } - - public void testResolveReleasedVersionsAtNewMinorBranchIn6x() { - Tuple, List> t = VersionUtils.resolveReleasedVersions( - TestNewMinorBranchIn6x.CURRENT, - TestNewMinorBranchIn6x.class - ); - List released = t.v1(); - List unreleased = t.v2(); - - assertThat( - released, - equalTo( - Arrays.asList( - TestNewMinorBranchIn6x.V_5_6_0, - TestNewMinorBranchIn6x.V_5_6_1, - TestNewMinorBranchIn6x.V_6_0_0, - TestNewMinorBranchIn6x.V_6_0_1, - TestNewMinorBranchIn6x.V_6_1_0, - TestNewMinorBranchIn6x.V_6_1_1 - ) - ) - ); - assertThat( - unreleased, - equalTo(Arrays.asList(TestNewMinorBranchIn6x.V_5_6_2, TestNewMinorBranchIn6x.V_6_1_2, TestNewMinorBranchIn6x.V_6_2_0)) - ); } /** - * Tests that {@link Version#minimumCompatibilityVersion()} and {@link VersionUtils#allReleasedVersions()} + * Tests that {@link Version#minimumCompatibilityVersion()} and {@link VersionUtils#allVersions()} * agree with the list of wire compatible versions we build in gradle. 
*/ public void testGradleVersionsMatchVersionUtils() { // First check the index compatible versions - List released = VersionUtils.allReleasedVersions() + List versions = VersionUtils.allVersions() .stream() /* Java lists all versions from the 5.x series onwards, but we only want to consider * ones that we're supposed to be compatible with. */ .filter(v -> v.onOrAfter(Version.CURRENT.minimumCompatibilityVersion())) + .map(Version::toString) .toList(); - VersionsFromProperty wireCompatible = new VersionsFromProperty("tests.gradle_wire_compat_versions"); - - Version minimumCompatibleVersion = Version.CURRENT.minimumCompatibilityVersion(); - List releasedWireCompatible = released.stream() - .filter(v -> Version.CURRENT.equals(v) == false) - .filter(v -> v.onOrAfter(minimumCompatibleVersion)) - .map(Object::toString) - .toList(); - assertEquals(releasedWireCompatible, wireCompatible.released); - - List unreleasedWireCompatible = VersionUtils.allUnreleasedVersions() - .stream() - .filter(v -> v.onOrAfter(minimumCompatibleVersion)) - .map(Object::toString) - .toList(); - assertEquals(unreleasedWireCompatible, wireCompatible.unreleased); + List gradleVersions = versionFromProperty("tests.gradle_wire_compat_versions"); + assertEquals(versions, gradleVersions); } - /** - * Read a versions system property as set by gradle into a tuple of {@code (releasedVersion, unreleasedVersion)}. - */ - private class VersionsFromProperty { - private final List released = new ArrayList<>(); - private final List unreleased = new ArrayList<>(); - - private VersionsFromProperty(String property) { - Set allUnreleased = new HashSet<>(Arrays.asList(System.getProperty("tests.gradle_unreleased_versions", "").split(","))); - if (allUnreleased.isEmpty()) { - fail("[tests.gradle_unreleased_versions] not set or empty. Gradle should set this before running."); - } - String versions = System.getProperty(property); - assertNotNull("Couldn't find [" + property + "]. 
Gradle should set this before running the tests.", versions); - logger.info("Looked up versions [{}={}]", property, versions); - - for (String version : versions.split(",")) { - if (allUnreleased.contains(version)) { - unreleased.add(version); - } else { - released.add(version); - } - } + private List versionFromProperty(String property) { + List versions = new ArrayList<>(); + String versionsString = System.getProperty(property); + assertNotNull("Couldn't find [" + property + "]. Gradle should set this before running the tests.", versionsString); + logger.info("Looked up versions [{}={}]", property, versionsString); + for (String version : versionsString.split(",")) { + versions.add(version); } + + return versions; } } diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle index 85a199af2d47..852a19116fb7 100644 --- a/test/immutable-collections-patch/build.gradle +++ b/test/immutable-collections-patch/build.gradle @@ -17,8 +17,8 @@ configurations { } dependencies { - implementation 'org.ow2.asm:asm:9.7' - implementation 'org.ow2.asm:asm-tree:9.7' + implementation 'org.ow2.asm:asm:9.7.1' + implementation 'org.ow2.asm:asm-tree:9.7.1' } def outputDir = layout.buildDirectory.dir("jdk-patches") diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 8677b1404a72..6d6c5ff889a4 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.java' dependencies { - api 'org.ow2.asm:asm:9.7' - api 'org.ow2.asm:asm-tree:9.7' - api 'org.ow2.asm:asm-analysis:9.7' + api 'org.ow2.asm:asm:9.7.1' + api 'org.ow2.asm:asm-tree:9.7.1' + api 'org.ow2.asm:asm-analysis:9.7.1' api "org.apache.logging.log4j:log4j-api:${versions.log4j}" testImplementation project(":test:framework") } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index a63571093ba5..d86cdb0de038 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -32,6 +32,113 @@ import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; +/** + * This enum represents data types the ES|QL query processing layer is able to + * interact with in some way. This includes fully representable types (e.g. + * {@link DataType#LONG}, numeric types which we promote (e.g. {@link DataType#SHORT}) + * or fold into other types (e.g. {@link DataType#DATE_PERIOD}) early in the + * processing pipeline, types for internal use + * cases (e.g. {@link DataType#PARTIAL_AGG}), and types which the language + * doesn't support, but require special handling anyway (e.g. + * {@link DataType#OBJECT}) + * + *

Process for adding a new data type

+ * Note: it is not expected that all the following steps be done in a single PR. + * Use capabilities to gate tests as you go, and use as many PRs as you think + * appropriate. New data types are complex, and smaller PRs will make reviews + * easier. + *
    + *
  • + * Create a new feature flag for the type in {@link EsqlCorePlugin}. We + * recommend developing the data type over a series of smaller PRs behind + * a feature flag; even for relatively simple data types.
  • + *
  • + * Add a capability to EsqlCapabilities related to the new type, and + * gated by the feature flag you just created. Again, using the feature + * flag is preferred over snapshot-only. As development progresses, you may + * need to add more capabilities related to the new type, e.g. for + * supporting specific functions. This is fine, and expected.
  • + *
  • + * Create a new CSV test file for the new type. You'll either need to + * create a new data file as well, or add values of the new type to + * an existing data file. See CsvTestDataLoader for creating a new data + * set.
  • + *
  • + * In the new CSV test file, start adding basic functionality tests. + * These should include reading and returning values, both from indexed data + * and from the ROW command. It should also include functions that support + * "every" type, such as Case or MvFirst.
  • + *
  • + * Add the new type to the CsvTestUtils#Type enum, if it isn't already + * there. You also need to modify CsvAssert to support reading values + * of the new type.
  • + *
  • + * At this point, the CSV tests should fail with a sensible ES|QL error + * message. Make sure they're failing in ES|QL, not in the test + * framework.
  • + *
  • + * Add the new data type to this enum. This will cause a bunch of + * compile errors for switch statements throughout the code. Resolve those + * as appropriate. That is the main way in which the new type will be tied + * into the framework.
  • + *
  • + * Add the new type to the {@link DataType#UNDER_CONSTRUCTION} + * collection. This is used by the test framework to disable some checks + * around how functions report their supported types, which would otherwise + * generate a lot of noise while the type is still in development.
  • + *
  • + * Add typed data generators to TestCaseSupplier, and make sure all + * functions that support the new type have tests for it.
  • + *
  • + * Work to support things all types should do. Equality and the + * "typeless" MV functions (MvFirst, MvLast, and MvCount) should work for + * most types. Case and Coalesce should also support all types. + * If the type has a natural ordering, make sure to test + * sorting and the other binary comparisons. Make sure these functions all + * have CSV tests that run against indexed data.
  • + *
  • + * Add conversion functions as appropriate. Almost all types should + * support ToString, and should have a "ToType" function that accepts a + * string. There may be other logical conversions depending on the nature + * of the type. Make sure to add the conversion function to the + * TYPE_TO_CONVERSION_FUNCTION map in EsqlDataTypeConverter. Make sure the + * conversion functions have CSV tests that run against indexed data.
  • + *
  • + * Support the new type in aggregations that are type independent. + * This includes Values, Count, and Count Distinct. Make sure there are + * CSV tests against indexed data for these.
  • + *
  • + * Support other functions and aggregations as appropriate, making sure + * to include CSV tests.
  • + *
  • + * Consider how the type will interact with other types. For example, + * if the new type is numeric, it may be good for it to be comparable with + * other numbers. Supporting this may require new logic in + * EsqlDataTypeConverter#commonType, individual function type checking, the + * verifier rules, or other places. We suggest starting with CSV tests and + * seeing where they fail.
  • + *
+ * There are some additional steps that should be taken when removing the + * feature flag and getting ready for a release: + *
    + *
  • + * Ensure the capabilities for this type are always enabled + *
  • + *
  • + * Remove the type from the {@link DataType#UNDER_CONSTRUCTION} + * collection
  • + *
  • + * Fix new test failures related to declared function types + *
  • + *
  • + * Make sure to run the full test suite locally via gradle to generate + * the function type tables and helper files with the new type. Ensure all + * the functions that support the type have appropriate docs for it.
  • + *
  • + * If appropriate, remove the type from the ESQL limitations list of + * unsupported types.
  • + *
+ */ public enum DataType { /** * Fields of this type are unsupported by any functions and are always diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/term-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/term-function.csv-spec new file mode 100644 index 000000000000..0c72cad02eed --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/term-function.csv-spec @@ -0,0 +1,206 @@ +############################################### +# Tests for Term function +# + +termWithTextField +required_capability: term_function + +// tag::term-with-field[] +FROM books +| WHERE TERM(author, "gabriel") +| KEEP book_no, title +| LIMIT 3; +// end::term-with-field[] +ignoreOrder:true + +book_no:keyword | title:text +4814 | El Coronel No Tiene Quien Le Escriba / No One Writes to the Colonel (Spanish Edition) +4917 | Autumn of the Patriarch +6380 | La hojarasca (Spanish Edition) +; + +termWithKeywordField +required_capability: term_function + +from employees +| where term(first_name, "Guoxiang") +| keep emp_no, first_name; + +// tag::term-with-keyword-field-result[] +emp_no:integer | first_name:keyword +10015 | Guoxiang +; +// end::term-with-keyword-field-result[] + +termWithQueryExpressions +required_capability: term_function + +from books +| where term(author, CONCAT("gab", "riel")) +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4814 | El Coronel No Tiene Quien Le Escriba / No One Writes to the Colonel (Spanish Edition) +4917 | Autumn of the Patriarch +6380 | La hojarasca (Spanish Edition) +; + +termAfterKeep +required_capability: term_function + +from books +| keep book_no, author +| where term(author, "faulkner") +| sort book_no +| limit 5; + +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | Danny Faulkner +; + +termAfterDrop +required_capability: term_function + +from books +| 
drop ratings, description, year, publisher, title, author.keyword +| where term(author, "william") +| keep book_no, author +| sort book_no +| limit 2; + +book_no:keyword | author:text +2713 | William Faulkner +2883 | William Faulkner +; + +termAfterEval +required_capability: term_function + +from books +| eval stars = to_long(ratings / 2.0) +| where term(author, "colleen") +| sort book_no +| keep book_no, author, stars +| limit 2; + +book_no:keyword | author:text | stars:long +2847 | Colleen Faulkner | 3 +4502 | Colleen Faulkner | 3 +; + +termWithConjunction +required_capability: term_function + +from books +| where term(author, "tolkien") and ratings > 4.95 +| eval author = mv_sort(author) +| keep book_no, ratings, author; +ignoreOrder:true + +book_no:keyword | ratings:double | author:keyword +2301 | 5.0 | John Ronald Reuel Tolkien +3254 | 5.0 | [Christopher Tolkien, John Ronald Reuel Tolkien] +7350 | 5.0 | [Christopher Tolkien, John Ronald Reuel Tolkien] +; + +termWithConjunctionAndSort +required_capability: term_function + +from books +| where term(author, "tolkien") and ratings > 4.95 +| eval author = mv_sort(author) +| keep book_no, ratings, author +| sort book_no; + +book_no:keyword | ratings:double | author:keyword +2301 | 5.0 | John Ronald Reuel Tolkien +3254 | 5.0 | [Christopher Tolkien, John Ronald Reuel Tolkien] +7350 | 5.0 | [Christopher Tolkien, John Ronald Reuel Tolkien] +; + +termWithFunctionPushedToLucene +required_capability: term_function + +from hosts +| where term(host, "beta") and cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") +| keep card, host, ip0, ip1; +ignoreOrder:true + +card:keyword |host:keyword |ip0:ip |ip1:ip +eth1 |beta |127.0.0.1 |127.0.0.2 +; + +termWithNonPushableConjunction +required_capability: term_function + +from books +| where term(title, "rings") and length(title) > 75 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 | A Tolkien Compass: Including J. R. R. 
Tolkien's Guide to the Names in The Lord of the Rings +; + +termWithMultipleWhereClauses +required_capability: term_function + +from books +| where term(title, "rings") +| where term(title, "lord") +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2675 | The Lord of the Rings - Boxed Set +2714 | Return of the King Being the Third Part of The Lord of the Rings +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) +; + +termWithMultivaluedField +required_capability: term_function + +from employees +| where term(job_positions, "Data Scientist") +| keep emp_no, first_name, last_name +| sort emp_no asc +| limit 2; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10014 | Berni | Genin +10017 | Cristinel | Bouloucos +; + +testWithMultiValuedFieldWithConjunction +required_capability: term_function + +from employees +| where term(job_positions, "Data Scientist") and term(first_name, "Cristinel") +| keep emp_no, first_name, last_name +| limit 1; + +emp_no:integer | first_name:keyword | last_name:keyword +10017 | Cristinel | Bouloucos +; + +termWithConjQueryStringFunctions +required_capability: term_function +required_capability: qstr_function + +from employees +| where term(job_positions, "Data Scientist") and qstr("first_name: Cristinel and gender: F") +| keep emp_no, first_name, last_name +| sort emp_no ASC +| limit 1; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10017 | Cristinel | Bouloucos +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java new file mode 100644 index 000000000000..4bb4897c9db5 --- /dev/null +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/TermIT.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; +import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.junit.Before; + +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.containsString; + +public class TermIT extends AbstractEsqlIntegTestCase { + + @Before + public void setupIndex() { + createAndPopulateIndex(); + } + + @Override + protected EsqlQueryResponse run(EsqlQueryRequest request) { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + return super.run(request); + } + + public void testSimpleTermQuery() throws Exception { + var query = """ + FROM test + | WHERE term(content,"dog") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(3), List.of(4), List.of(5))); + } + } + + public void testTermWithinEval() { + var query = """ + FROM test + | EVAL term_query = term(title,"fox") + """; + + var error = 
expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("[Term] function is only supported in WHERE commands")); + } + + public void testMultipleTerm() { + var query = """ + FROM test + | WHERE term(content,"fox") AND term(content,"brown") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(2), List.of(4), List.of(5))); + } + } + + public void testNotWhereTerm() { + var query = """ + FROM test + | WHERE NOT term(content,"brown") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(3))); + } + } + + private void createAndPopulateIndex() { + var indexName = "test"; + var client = client().admin().indices(); + var CreateRequest = client.prepareCreate(indexName) + .setSettings(Settings.builder().put("index.number_of_shards", 1)) + .setMapping("id", "type=integer", "content", "type=text"); + assertAcked(CreateRequest); + client().prepareBulk() + .add( + new IndexRequest(indexName).id("1") + .source("id", 1, "content", "The quick brown animal swiftly jumps over a lazy dog", "title", "A Swift Fox's Journey") + ) + .add( + new IndexRequest(indexName).id("2") + .source("id", 2, "content", "A speedy brown fox hops effortlessly over a sluggish canine", "title", "The Fox's Leap") + ) + .add( + new IndexRequest(indexName).id("3") + .source("id", 3, "content", "Quick and nimble, the fox vaults over the lazy dog", "title", "Brown Fox in Action") + ) + .add( + new IndexRequest(indexName).id("4") + .source( + "id", + 4, + "content", + "A fox that is quick and brown jumps over a dog that is quite lazy", + "title", + "Speedy Animals" + ) + ) + .add( + new IndexRequest(indexName).id("5") + .source( + 
"id", + 5, + "content", + "With agility, a quick brown fox bounds over a slow-moving dog", + "title", + "Foxes and Canines" + ) + ) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + ensureYellow(indexName); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 19ba6a5151ea..ee3f5be185b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -550,7 +550,12 @@ public enum Cap { /** * Support the "METADATA _score" directive to enable _score column. */ - METADATA_SCORE(Build.current().isSnapshot()); + METADATA_SCORE(Build.current().isSnapshot()), + + /** + * Term function + */ + TERM_FUNCTION(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index f5fd82d742bc..d6f0ff766eb4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Term; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; @@ -837,6 +838,14 @@ private static void 
checkFullTextQueryFunctions(LogicalPlan plan, Set f m -> "[" + m.functionName() + "] " + m.functionType(), failures ); + checkCommandsBeforeExpression( + plan, + condition, + Term.class, + lp -> (lp instanceof Limit == false) && (lp instanceof Aggregate == false), + m -> "[" + m.functionName() + "] " + m.functionType(), + failures + ); checkNotPresentInDisjunctions(condition, ftf -> "[" + ftf.functionName() + "] " + ftf.functionType(), failures); checkFullTextFunctionsParents(condition, failures); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index c66a5293eb14..3749b4687935 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Term; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; @@ -424,7 +425,8 @@ private static FunctionDefinition[][] snapshotFunctions() { // This is an experimental function and can be removed without notice. 
def(Delay.class, Delay::new, "delay"), def(Kql.class, Kql::new, "kql"), - def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; + def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), + def(Term.class, Term::new, "term") } }; } public EsqlFunctionRegistry snapshotRegistry() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index 8804a031de78..d6b79d16b74f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -29,6 +29,9 @@ public static List getNamedWriteables() { if (EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()) { entries.add(Kql.ENTRY); } + if (EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()) { + entries.add(Term.ENTRY); + } return Collections.unmodifiableList(entries); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java new file mode 100644 index 000000000000..125a5b02b6e1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.common.Failure; +import org.elasticsearch.xpack.esql.common.Failures; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; + +/** + * Full text function that performs a {@link TermQuery} . + */ +public class Term extends FullTextFunction implements Validatable { + + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Term", Term::readFrom); + + private final Expression field; + + @FunctionInfo( + returnType = "boolean", + preview = true, + description = "Performs a Term query on the specified field. 
Returns true if the provided term matches the row.", + examples = { @Example(file = "term-function", tag = "term-with-field") } + ) + public Term( + Source source, + @Param(name = "field", type = { "keyword", "text" }, description = "Field that the query will target.") Expression field, + @Param( + name = "query", + type = { "keyword", "text" }, + description = "Term you wish to find in the provided field." + ) Expression termQuery + ) { + super(source, termQuery, List.of(field, termQuery)); + this.field = field; + } + + private static Term readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression field = in.readNamedWriteable(Expression.class); + Expression query = in.readNamedWriteable(Expression.class); + return new Term(source, field, query); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field()); + out.writeNamedWriteable(query()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected TypeResolution resolveNonQueryParamTypes() { + return isNotNull(field, sourceText(), FIRST).and(isString(field, sourceText(), FIRST)).and(super.resolveNonQueryParamTypes()); + } + + @Override + public void validate(Failures failures) { + if (field instanceof FieldAttribute == false) { + failures.add( + Failure.fail( + field, + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + functionName(), + functionType(), + field.sourceText() + ) + ); + } + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Term(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Term::new, field, query()); + } + + protected TypeResolutions.ParamOrdinal queryParamOrdinal() { + return SECOND; + } + + public Expression field() { + return field; + } + + @Override + public String 
functionName() { + return ENTRY.name; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 3d6c35e91429..9d02af0efbab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.esql.core.util.Queries; import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Term; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; @@ -254,6 +255,8 @@ static boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); } else if (exp instanceof Match mf) { return mf.field() instanceof FieldAttribute && DataType.isString(mf.field().dataType()); + } else if (exp instanceof Term term) { + return term.field() instanceof FieldAttribute && DataType.isString(term.field().dataType()); } else if (exp instanceof FullTextFunction) { return true; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 1580b7793124..1aee8f029e47 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -37,6 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Term; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; @@ -92,6 +93,7 @@ public final class EsqlExpressionTranslators { new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), new KqlFunctionTranslator(), + new TermFunctionTranslator(), new Scalars() ); @@ -548,4 +550,12 @@ protected Query asQuery(Kql kqlFunction, TranslatorHandler handler) { return new KqlQuery(kqlFunction.source(), kqlFunction.queryAsText()); } } + + public static class TermFunctionTranslator extends ExpressionTranslator { + @Override + protected Query asQuery(Term term, TranslatorHandler handler) { + return new TermQuery(term.source(), ((FieldAttribute) term.field()).name(), term.queryAsText()); + } + } + } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 2e8b856cf82a..2834e5f3f835 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -265,6 +265,10 @@ public final void test() throws Throwable { "lookup join disabled for csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V4.capabilityName()) ); + 
assumeFalse( + "can't use TERM function in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.TERM_FUNCTION.capabilityName()) + ); if (Build.current().isSnapshot()) { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. Spelling mistake?", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 882b8b7dbfd7..7e3ef4f1f5f8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1337,6 +1337,11 @@ public void testMatchFunctionOnlyAllowedInWhere() throws Exception { checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, \"Anna\")", "function"); } + public void testTermFunctionOnlyAllowedInWhere() throws Exception { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + checkFullTextFunctionsOnlyAllowedInWhere("Term", "term(first_name, \"Anna\")", "function"); + } + public void testMatchOperatornOnlyAllowedInWhere() throws Exception { checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator"); } @@ -1401,6 +1406,11 @@ public void testMatchFunctionWithDisjunctions() { checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")", "function"); } + public void testTermFunctionWithDisjunctions() { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + checkWithDisjunctions("Term", "term(first_name, \"Anna\")", "function"); + } + public void testMatchOperatorWithDisjunctions() { checkWithDisjunctions(":", "first_name : \"Anna\"", "operator"); } @@ -1463,6 +1473,11 @@ public void testMatchFunctionWithNonBooleanFunctions() { checkFullTextFunctionsWithNonBooleanFunctions("MATCH", 
"match(first_name, \"Anna\")", "function"); } + public void testTermFunctionWithNonBooleanFunctions() { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + checkFullTextFunctionsWithNonBooleanFunctions("Term", "term(first_name, \"Anna\")", "function"); + } + public void testMatchOperatorWithNonBooleanFunctions() { checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator"); } @@ -1563,6 +1578,45 @@ public void testMatchTargetsExistingField() throws Exception { assertEquals("1:33: Unknown column [first_name]", error("from test | keep emp_no | where first_name : \"Anna\"")); } + public void testTermFunctionArgNotConstant() throws Exception { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + assertEquals( + "1:19: second argument of [term(first_name, first_name)] must be a constant, received [first_name]", + error("from test | where term(first_name, first_name)") + ); + assertEquals( + "1:59: second argument of [term(first_name, query)] must be a constant, received [query]", + error("from test | eval query = concat(\"first\", \" name\") | where term(first_name, query)") + ); + // Other value types are tested in QueryStringFunctionTests + } + + // These should pass eventually once we lift some restrictions on match function + public void testTermFunctionCurrentlyUnsupportedBehaviour() throws Exception { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + assertEquals( + "1:67: Unknown column [first_name]", + error("from test | stats max_salary = max(salary) by emp_no | where term(first_name, \"Anna\")") + ); + } + + public void testTermFunctionNullArgs() throws Exception { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + assertEquals( + "1:19: first argument of [term(null, \"query\")] cannot be null, received [null]", + 
error("from test | where term(null, \"query\")") + ); + assertEquals( + "1:19: second argument of [term(first_name, null)] cannot be null, received [null]", + error("from test | where term(first_name, null)") + ); + } + + public void testTermTargetsExistingField() throws Exception { + assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + assertEquals("1:38: Unknown column [first_name]", error("from test | keep emp_no | where term(first_name, \"Anna\")")); + } + public void testCoalesceWithMixedNumericTypes() { assertEquals( "1:22: second argument of [coalesce(languages, height)] must be [integer], found value [height] type [double]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java new file mode 100644 index 000000000000..c1c0dc26880a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +@FunctionName("term") +public class TermTests extends AbstractFunctionTestCase { + + public TermTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List> supportedPerPosition = supportedParams(); + List suppliers = new LinkedList<>(); + for (DataType fieldType : DataType.stringTypes()) { + for (DataType queryType : DataType.stringTypes()) { + addPositiveTestCase(List.of(fieldType, queryType), suppliers); + addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); + } + } + + List suppliersWithErrors = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); + + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return parameterSuppliersFromTypedData( + suppliersWithErrors.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList() + ); + } + + protected static List> supportedParams() { + Set 
supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); + Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); + Set supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); + List> supportedPerPosition = List.of( + supportedTextParams, + supportedTextParams, + supportedNumericParams, + supportedFuzzinessParams + ); + return supportedPerPosition; + } + + protected static void addPositiveTestCase(List paramDataTypes, List suppliers) { + + // Positive case - creates an ES field from the field parameter type + suppliers.add( + new TestCaseSupplier( + getTestCaseName(paramDataTypes, "-ES field"), + paramDataTypes, + () -> new TestCaseSupplier.TestCase( + getTestParams(paramDataTypes), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataType.BOOLEAN, + equalTo(true) + ) + ) + ); + } + + private static void addNonFieldTestCase( + List paramDataTypes, + List> supportedPerPosition, + List suppliers + ) { + // Negative case - use directly the field parameter type + suppliers.add( + new TestCaseSupplier( + getTestCaseName(paramDataTypes, "-non ES field"), + paramDataTypes, + typeErrorSupplier(true, supportedPerPosition, paramDataTypes, TermTests::matchTypeErrorSupplier) + ) + ); + } + + private static List getTestParams(List paramDataTypes) { + String fieldName = randomIdentifier(); + List params = new ArrayList<>(); + params.add( + new TestCaseSupplier.TypedData( + new FieldExpression(fieldName, List.of(new FieldExpression.FieldValue(fieldName))), + paramDataTypes.get(0), + "field" + ) + ); + params.add(new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), paramDataTypes.get(1), "query")); + return params; + } + + private static String getTestCaseName(List paramDataTypes, String fieldType) { + StringBuilder sb = new StringBuilder(); + sb.append("<"); + sb.append(paramDataTypes.get(0)).append(fieldType).append(", "); + sb.append(paramDataTypes.get(1)); + sb.append(">"); + 
return sb.toString(); + } + + private static String matchTypeErrorSupplier(boolean includeOrdinal, List> validPerPosition, List types) { + return "[] cannot operate on [" + types.getFirst().typeName() + "], which is not a field from an index mapping"; + } + + @Override + protected Expression build(Source source, List args) { + return new Match(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 86f5c812737b..d32124c1aaf3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -1391,6 +1391,35 @@ public void testMultipleMatchFilterPushdown() { assertThat(actualLuceneQuery.toString(), is(expectedLuceneQuery.toString())); } + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7]] + * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] + * \_EsQueryExec[test], indexMode[standard], query[{"term":{"last_name":{"query":"Smith"}}}] + */ + public void testTermFunction() { + // Skip test if the term function is not enabled. 
+ assumeTrue("term function capability not available", EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()); + + var plan = plannerOptimizer.plan(""" + from test + | where term(last_name, "Smith") + """, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + var expected = QueryBuilders.termQuery("last_name", "Smith"); + assertThat(query.query().toString(), is(expected.toString())); + } + private QueryBuilder wrapWithSingleQuery(String query, QueryBuilder inner, String fieldName, Source source) { return FilterTests.singleValueQuery(query, inner, fieldName, source); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index e6c061f44a9e..81f65668722f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasbile, so we just check that we return the correct count as an approximation. - - length: {esql.functions: 127} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 128} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version":